Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/logs/users-5-7.log
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
+ create_infra users-22226
+ local ns=users-22226
+ '[' -n pxc-operator ']'
+ kubectl get pxc --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch pxc -n users-305 some-name --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.wQugoc44l9
++ mktemp
+ local LAST_ERR=/tmp/tmp.ojBeZzz4e5
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.wQugoc44l9
perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-305 namespace
+ cat /tmp/tmp.ojBeZzz4e5
+ rm /tmp/tmp.wQugoc44l9 /tmp/tmp.ojBeZzz4e5
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.rzyiOnEIrg
++ mktemp
+ local LAST_ERR=/tmp/tmp.i3QsO3k719
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.rzyiOnEIrg
No resources found
+ cat /tmp/tmp.i3QsO3k719
+ rm /tmp/tmp.rzyiOnEIrg /tmp/tmp.i3QsO3k719
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.oABcB50l7l
++ mktemp
+ local LAST_ERR=/tmp/tmp.HuJiTUlqdU
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.oABcB50l7l
No resources found
+ cat /tmp/tmp.HuJiTUlqdU
+ rm /tmp/tmp.oABcB50l7l /tmp/tmp.HuJiTUlqdU
+ return 0
+ create_namespace pxc-operator
+ local namespace=pxc-operator
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ tail -n1
++ awk '-F ' '{print $2}'
++ sed s/NAMESPACE//
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep validate-auth
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get crd
++ grep chaos-mesh.org
++ awk '{print $1}'
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrole
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ kubectl_bin get ns
+ awk '{print$1}'
+ '[' -n '' ']'
+ xargs kubectl delete ns
+ desc 'cleaned up old namespaces pxc-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace pxc-operator
+ grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
++ mktemp
++ mktemp
+ local LAST_OUT=/tmp/tmp.RIRKBWkBn4
++ mktemp
+ local LAST_OUT=/tmp/tmp.D1IwOj1lQY
++ mktemp
+ local LAST_ERR=/tmp/tmp.quIw1fIbTm
+ local exit_status=0
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.LFha1WMi62
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.RIRKBWkBn4
+ cat /tmp/tmp.quIw1fIbTm
+ rm /tmp/tmp.RIRKBWkBn4 /tmp/tmp.quIw1fIbTm
+ return 0
namespace "users-305" deleted
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.D1IwOj1lQY
namespace "pxc-operator" deleted
+ cat /tmp/tmp.LFha1WMi62
+ rm /tmp/tmp.D1IwOj1lQY /tmp/tmp.LFha1WMi62
+ return 0
+ wait_for_delete namespace/pxc-operator
+ local res=namespace/pxc-operator
+ echo -n 'waiting for namespace/pxc-operator to be deleted'
waiting for namespace/pxc-operator to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "pxc-operator" not found
+ desc 'create namespace pxc-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.SeS2I5nwdq
++ mktemp
+ local LAST_ERR=/tmp/tmp.SE0sRkBCKr
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.SeS2I5nwdq
namespace/pxc-operator created
+ cat /tmp/tmp.SE0sRkBCKr
+ rm /tmp/tmp.SeS2I5nwdq /tmp/tmp.SE0sRkBCKr
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.3M06OjoAZO
+++ mktemp
++ local LAST_ERR=/tmp/tmp.1jrLmiUOSX
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.3M06OjoAZO
++ cat /tmp/tmp.1jrLmiUOSX
++ rm /tmp/tmp.3M06OjoAZO /tmp/tmp.1jrLmiUOSX
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2193-1eb37b20-11-cluster2 --namespace=pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.oxpekQQxae
++ mktemp
+ local LAST_ERR=/tmp/tmp.tLRfmQmKzU
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2193-1eb37b20-11-cluster2 --namespace=pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.oxpekQQxae
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2193-1eb37b20-11-cluster2" modified.
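Every kubectl_bin call in this trace follows the same shape: capture stdout and stderr into mktemp files, retry the raw kubectl command up to three times, then replay the captured output and return the exit status. The helper below is a minimal reconstruction of that behavior from the trace alone (the real function lives in the repo's e2e-tests helpers; the backoff and return handling here are assumptions):

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ $exit_status != 0 ]; then
                sleep 0   # the trace shows "sleep 0" between failed attempts
            else
                break     # success on this attempt, stop retrying
            fi
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2
        rm "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }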
+ cat /tmp/tmp.tLRfmQmKzU
+ rm /tmp/tmp.oxpekQQxae /tmp/tmp.tLRfmQmKzU
+ return 0
+ deploy_operator
+ desc 'start PXC operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
start PXC operator
-----------------------------------------------------------------------------------
+ kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/deploy/crd.yaml
++ mktemp
+ local LAST_OUT=/tmp/tmp.hKx40QsNIX
++ mktemp
+ local LAST_ERR=/tmp/tmp.c01mNiour9
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/deploy/crd.yaml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.hKx40QsNIX
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied
+ cat /tmp/tmp.c01mNiour9
+ rm /tmp/tmp.hKx40QsNIX /tmp/tmp.c01mNiour9
+ return 0
+ '[' -n pxc-operator ']'
+ apply_rbac cw-rbac
+ local operator_namespace=pxc-operator
+ local rbac=cw-rbac
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/deploy/cw-rbac.yaml
+ sed -e 's^namespace: .*^namespace: pxc-operator^'
+ kubectl_bin apply -f -
++ mktemp
+ local LAST_OUT=/tmp/tmp.CeLAkKouJO
++ mktemp
+ local LAST_ERR=/tmp/tmp.GyZ5i0sfCV
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.CeLAkKouJO
clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged
serviceaccount/percona-xtradb-cluster-operator created
clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged
+ cat /tmp/tmp.GyZ5i0sfCV
+ rm /tmp/tmp.CeLAkKouJO /tmp/tmp.GyZ5i0sfCV
+ return 0
+ sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2193-1eb37b20^'
+ kubectl_bin apply -f -
+ yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' -
++ mktemp
+ sed -e 's^failureThreshold: .*^failureThreshold: 10^'
+ yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' -
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/deploy/cw-operator.yaml
+ local LAST_OUT=/tmp/tmp.hgDNBd3cZ2
++ mktemp
+ local LAST_ERR=/tmp/tmp.ErDTLO94Df
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.hgDNBd3cZ2
deployment.apps/percona-xtradb-cluster-operator created
service/percona-xtradb-cluster-operator created
+ cat /tmp/tmp.ErDTLO94Df
+ rm /tmp/tmp.hgDNBd3cZ2 /tmp/tmp.ErDTLO94Df
+ return 0
+ sleep 10
+ kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s
++ mktemp
+ local LAST_OUT=/tmp/tmp.vGgSXdaOqF
++ mktemp
+ local LAST_ERR=/tmp/tmp.lBx4kUxsqr
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.vGgSXdaOqF
pod/percona-xtradb-cluster-operator-75d958d548-5877h condition met
+ cat /tmp/tmp.lBx4kUxsqr
+ rm /tmp/tmp.vGgSXdaOqF /tmp/tmp.lBx4kUxsqr
+ return 0
++ get_operator_pod
++ local label_prefix=app.kubernetes.io/
+++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator
+++ grep -c percona-xtradb-cluster-operator
++ local check_label=1
++ [[ 1 -eq 0 ]]
++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
+++ mktemp
++ local LAST_OUT=/tmp/tmp.wiqtfjsnPj
+++ mktemp
++ local LAST_ERR=/tmp/tmp.usAShRPoZ0
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.wiqtfjsnPj
++ cat /tmp/tmp.usAShRPoZ0
++ rm /tmp/tmp.wiqtfjsnPj /tmp/tmp.usAShRPoZ0
++ return 0
+ wait_pod percona-xtradb-cluster-operator-75d958d548-5877h 480 pxc-operator
+ local pod=percona-xtradb-cluster-operator-75d958d548-5877h
+ local max_retry=480
+ local ns=pxc-operator
++ echo percona-xtradb-cluster-operator-75d958d548-5877h
++ grep -E '^(pxc|proxysql)$'
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=
+ set +o xtrace
pod/percona-xtradb-cluster-operator-75d958d548-5877h condition met
waiting for pod/percona-xtradb-cluster-operator-75d958d548-5877h to become Ready.Ok
+ sleep 3
+ create_namespace users-22226
+ local namespace=users-22226
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ tail -n1
++ awk '-F ' '{print $2}'
++ sed s/NAMESPACE//
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ awk '{print $1}'
++ grep chaos-mesh
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep validate-auth
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get crd
++ awk '{print $1}'
++ grep chaos-mesh.org
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get clusterrole
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ kubectl_bin get ns
+ awk '{print$1}'
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces users-22226'
+ xargs kubectl delete ns
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces users-22226
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace users-22226
+ grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
++ mktemp
+ local LAST_OUT=/tmp/tmp.fSbO5YMaek
++ mktemp
++ mktemp
+ local LAST_OUT=/tmp/tmp.WMwy0lXJI1
+ local LAST_ERR=/tmp/tmp.1c7gL2iNAa
+ local exit_status=0
++ seq 0 2
++ mktemp
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace users-22226
+ local LAST_ERR=/tmp/tmp.INcDpkaAbG
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.WMwy0lXJI1
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace users-22226
+ cat /tmp/tmp.INcDpkaAbG
+ rm /tmp/tmp.WMwy0lXJI1 /tmp/tmp.INcDpkaAbG
+ return 0
error: resource(s) were provided, but no name was specified
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace users-22226
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ cat /tmp/tmp.fSbO5YMaek
+ cat /tmp/tmp.1c7gL2iNAa
Error from server (NotFound): namespaces "users-22226" not found
+ rm /tmp/tmp.fSbO5YMaek /tmp/tmp.1c7gL2iNAa
+ return 1
+ :
+ wait_for_delete namespace/users-22226
+ local res=namespace/users-22226
+ echo -n 'waiting for namespace/users-22226 to be deleted'
waiting for namespace/users-22226 to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "users-22226" not found
+ desc 'create namespace users-22226'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace users-22226
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace users-22226
++ mktemp
+ local LAST_OUT=/tmp/tmp.8byyEUtski
++ mktemp
+ local LAST_ERR=/tmp/tmp.2Uc0fPQf7J
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace users-22226
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.8byyEUtski
namespace/users-22226 created
+ cat /tmp/tmp.2Uc0fPQf7J
+ rm /tmp/tmp.8byyEUtski /tmp/tmp.2Uc0fPQf7J
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.qkmOOeIdAw
+++ mktemp
++ local LAST_ERR=/tmp/tmp.VS1vK066X5
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.qkmOOeIdAw
++ cat /tmp/tmp.VS1vK066X5
++ rm /tmp/tmp.qkmOOeIdAw /tmp/tmp.VS1vK066X5
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2193-1eb37b20-11-cluster2 --namespace=users-22226
++ mktemp
+ local LAST_OUT=/tmp/tmp.vc3FxMPG93
++ mktemp
+ local LAST_ERR=/tmp/tmp.2cEt6Bmaql
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2193-1eb37b20-11-cluster2 --namespace=users-22226
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.vc3FxMPG93
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2193-1eb37b20-11-cluster2" modified.
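The destroy_chaos_mesh and namespace-cleanup steps above all repeat one list-filter-delete pattern, and every "error: resource(s) were provided, but no name was specified" is the benign empty-match case, swallowed by the `+ :` no-op that follows. A sketch of the pattern as traced, plus a guarded variant that would avoid the error entirely (the `|| :` placement is inferred from the trace, not confirmed against the script):

    # as traced: delete whatever the filter matched, tolerating an empty list
    timeout 30 kubectl delete clusterrole \
        $(kubectl get clusterrole | grep chaos-mesh | awk '{print $1}') || :

    # guarded variant: skip the delete entirely when nothing matched
    names=$(kubectl get clusterrole | grep chaos-mesh | awk '{print $1}')
    [ -n "$names" ] && timeout 30 kubectl delete clusterrole $names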
+ cat /tmp/tmp.2cEt6Bmaql
+ rm /tmp/tmp.vc3FxMPG93 /tmp/tmp.2cEt6Bmaql
+ return 0
+ apply_secrets
+ desc 'create secrets for cloud storages'
+ set +o xtrace
-----------------------------------------------------------------------------------
create secrets for cloud storages
-----------------------------------------------------------------------------------
+ '[' -z '' ']'
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/cloud-secret.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.qmP6Zmfqwp
++ mktemp
+ local LAST_ERR=/tmp/tmp.NJRgrxxpWA
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/cloud-secret.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.qmP6Zmfqwp
secret/minio-secret created
secret/aws-s3-secret created
secret/gcp-cs-secret created
secret/azure-secret created
+ cat /tmp/tmp.NJRgrxxpWA
+ rm /tmp/tmp.qmP6Zmfqwp /tmp/tmp.NJRgrxxpWA
+ return 0
+ desc 'create PXC cluster with 1-password secret'
+ set +o xtrace
-----------------------------------------------------------------------------------
create PXC cluster with 1-password secret
-----------------------------------------------------------------------------------
+ newpass=test-password
++ echo -n test-password
++ base64
+ newpassencrypted=dGVzdC1wYXNzd29yZA==
+ cluster=some-name
+ spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/some-name.yml '' '' /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/secrets_one_pass.yml
+ local cluster=some-name
+ local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/some-name.yml
+ local size=3
+ local sleep=10
+ local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/secrets_one_pass.yml
+ local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/client.yml
+ local port=3306
+ desc 'create first PXC cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
create first PXC cluster
-----------------------------------------------------------------------------------
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/secrets_one_pass.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.Xkk0ucOGCf
++ mktemp
+ local LAST_ERR=/tmp/tmp.MVLPFlJBy1
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/secrets_one_pass.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.Xkk0ucOGCf
secret/my-cluster-secrets created
secret/some-name-ssl created
secret/some-name-ssl-internal created
+ cat /tmp/tmp.MVLPFlJBy1
+ rm /tmp/tmp.Xkk0ucOGCf /tmp/tmp.MVLPFlJBy1
+ return 0
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/client.yml
+ '[' -z '' ']'
+ kubectl_bin apply -f -
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/client.yml
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/client.yml
++ mktemp
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#'
+ local LAST_OUT=/tmp/tmp.bbPWqjuaPW
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#'
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2193-1eb37b20#'
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#'
++ mktemp
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#'
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
+ local LAST_ERR=/tmp/tmp.XMVYMojrX1
+ local exit_status=0
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-22226~
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.bbPWqjuaPW
deployment.apps/pxc-client created
+ cat /tmp/tmp.XMVYMojrX1
+ rm /tmp/tmp.bbPWqjuaPW /tmp/tmp.XMVYMojrX1
+ return 0
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
+ [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]]
+ [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]]
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/some-name.yml
+ '[' -z '' ']'
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/some-name.yml
+ kubectl_bin apply -f -
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/some-name.yml
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
++ mktemp
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#'
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#'
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2193-1eb37b20#'
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#'
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
+ local LAST_OUT=/tmp/tmp.tMEBJwiELh
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-22226~
++ mktemp
+ local LAST_ERR=/tmp/tmp.UTe1n01FXY
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#'
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.tMEBJwiELh
perconaxtradbcluster.pxc.percona.com/some-name created
+ cat /tmp/tmp.UTe1n01FXY
+ rm /tmp/tmp.tMEBJwiELh /tmp/tmp.UTe1n01FXY
+ return 0
+ desc 'check if all 3 Pods started'
+ set +o xtrace
-----------------------------------------------------------------------------------
check if all 3 Pods started
-----------------------------------------------------------------------------------
++ get_proxy some-name
++ local target_cluster=some-name
+++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}'
++++ mktemp
+++ local LAST_OUT=/tmp/tmp.CRfi2wWyZW
++++ mktemp
+++ local LAST_ERR=/tmp/tmp.8LUsIPq8I1
+++ local exit_status=0
++++ seq 0 2
+++ for i in '$(seq 0 2)'
+++ set +e
+++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}'
+++ exit_status=0
+++ set -e
+++ '[' 0 '!=' 0 ']'
+++ break
+++ cat /tmp/tmp.CRfi2wWyZW
+++ cat /tmp/tmp.8LUsIPq8I1
+++ rm /tmp/tmp.CRfi2wWyZW /tmp/tmp.8LUsIPq8I1
+++ return 0
++ [[ '' == \t\r\u\e ]]
+++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}'
++++ mktemp
+++ local LAST_OUT=/tmp/tmp.z2xcraZq4d
++++ mktemp
+++ local LAST_ERR=/tmp/tmp.LTpJsvdO1B
+++ local exit_status=0
++++ seq 0 2
+++ for i in '$(seq 0 2)'
+++ set +e
+++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}'
+++ exit_status=0
+++ set -e
+++ '[' 0 '!=' 0 ']'
+++ break
+++ cat /tmp/tmp.z2xcraZq4d
+++ cat /tmp/tmp.LTpJsvdO1B
+++ rm /tmp/tmp.z2xcraZq4d /tmp/tmp.LTpJsvdO1B
+++ return 0
++ [[ true == \t\r\u\e ]]
++ echo some-name-proxysql
++ return
+ local proxy=some-name-proxysql
+ kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-22226
++ mktemp
+ local LAST_OUT=/tmp/tmp.zfiS2cCOc4
++ mktemp
+ local LAST_ERR=/tmp/tmp.sod2uJs3jS
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-22226
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-22226
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-22226
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ cat /tmp/tmp.zfiS2cCOc4
+ cat /tmp/tmp.sod2uJs3jS
error: no matching resources found
+ rm /tmp/tmp.zfiS2cCOc4 /tmp/tmp.sod2uJs3jS
+ return 1
+ true
+ wait_for_running some-name-proxysql 1
+ local name=some-name-proxysql
+ let last_pod=0
+ :
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 0
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-proxysql-0 480
+ local pod=some-name-proxysql-0
+ local max_retry=480
+ local ns=
++ echo some-name-proxysql-0
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=proxysql
+ set +o xtrace
pod/some-name-proxysql-0 condition met
waiting for pod/some-name-proxysql-0 to become Ready.Ok
+ wait_for_running some-name-pxc 3
+ local name=some-name-pxc
+ let last_pod=2
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 2
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-pxc-0 480
+ local pod=some-name-pxc-0
+ local max_retry=480
+ local ns=
++ echo some-name-pxc-0
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=pxc
+ set +o xtrace
pod/some-name-pxc-0 condition met
waiting for pod/some-name-pxc-0 to become Ready.Ok
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-pxc-1 480
+ local pod=some-name-pxc-1
+ local max_retry=480
+ local ns=
++ echo some-name-pxc-1
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=pxc
+ set +o xtrace
pod/some-name-pxc-1 condition met
waiting for pod/some-name-pxc-1 to become Ready.Ok
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-pxc-2 480
+ local pod=some-name-pxc-2
+ local max_retry=480
+ local ns=
++ echo some-name-pxc-2
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=pxc
+ set +o xtrace
pod/some-name-pxc-2 condition met
waiting for pod/some-name-pxc-2 to become Ready.Ok
+ sleep 10
++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}'
+ local secret_name=my-cluster-secrets
++ getSecretData my-cluster-secrets root
++ local secretName=my-cluster-secrets
++ local dataKey=root
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.gyhlfpEjDH
+++ mktemp
++ local LAST_ERR=/tmp/tmp.2JvYJwXkKH
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.gyhlfpEjDH
++ cat /tmp/tmp.2JvYJwXkKH
++ rm /tmp/tmp.gyhlfpEjDH /tmp/tmp.2JvYJwXkKH
++ return 0
+ local 'root_pass=^>]84CpfZXqvn(d?^K'
+ desc 'write data'
+ set +o xtrace
-----------------------------------------------------------------------------------
write data
-----------------------------------------------------------------------------------
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
++ is_keyring_plugin_in_use some-name
++ local cluster=some-name
++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf'
++ grep -E -o 'early-plugin-load=keyring_\w+.so'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.jToZfkZWIF
+++ mktemp
++ local LAST_ERR=/tmp/tmp.yqR2R4QOAo
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.jToZfkZWIF
++ cat /tmp/tmp.yqR2R4QOAo
Unable to use a TTY - input is not a terminal or the right kind of file
++ rm /tmp/tmp.jToZfkZWIF /tmp/tmp.yqR2R4QOAo
++ return 0
+ [[ -n '' ]]
+ run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''^>]84CpfZXqvn(d?^K'\'' -P3306'
+ local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;'
+ local 'uri=-h some-name-proxysql -uroot -p'\''^>]84CpfZXqvn(d?^K'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.kLUndnrmva
+++ mktemp
++ local LAST_ERR=/tmp/tmp.BEm4zje6Uk
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.kLUndnrmva
++ cat /tmp/tmp.BEm4zje6Uk
++ rm /tmp/tmp.kLUndnrmva /tmp/tmp.BEm4zje6Uk
++ return 0
+ client_pod=pxc-client-857d976497-npszm
+ wait_pod pxc-client-857d976497-npszm
+ local pod=pxc-client-857d976497-npszm
+ local max_retry=480
+ local ns=
++ echo pxc-client-857d976497-npszm
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-857d976497-npszm condition met
waiting for pod/pxc-client-857d976497-npszm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''^>]84CpfZXqvn(d?^K'\'' -P3306'
+ local 'command=INSERT myApp.myApp (id) VALUES (100500)'
+ local 'uri=-h some-name-proxysql -uroot -p'\''^>]84CpfZXqvn(d?^K'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.6ijEA7DqMx
+++ mktemp
++ local LAST_ERR=/tmp/tmp.ZvkdujNFto
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.6ijEA7DqMx
++ cat /tmp/tmp.ZvkdujNFto
++ rm /tmp/tmp.6ijEA7DqMx /tmp/tmp.ZvkdujNFto
++ return 0
+ client_pod=pxc-client-857d976497-npszm
+ wait_pod pxc-client-857d976497-npszm
+ local pod=pxc-client-857d976497-npszm
+ local max_retry=480
+ local ns=
++ echo pxc-client-857d976497-npszm
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-857d976497-npszm condition met
waiting for pod/pxc-client-857d976497-npszm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ sleep 30
++ seq 0 2
+ for i in '$(seq 0 $((size - 1)))'
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''^>]84CpfZXqvn(d?^K'\'' -P3306'
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''^>]84CpfZXqvn(d?^K'\'' -P3306'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-1-57.sql ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''^>]84CpfZXqvn(d?^K'\'' -P3306'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''^>]84CpfZXqvn(d?^K'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.DSd0Er0Oz4
+++ mktemp
++ local LAST_ERR=/tmp/tmp.EVl8FxbYAs
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.DSd0Er0Oz4
++ cat /tmp/tmp.EVl8FxbYAs
++ rm /tmp/tmp.DSd0Er0Oz4 /tmp/tmp.EVl8FxbYAs
++ return 0
+ client_pod=pxc-client-857d976497-npszm
+ wait_pod pxc-client-857d976497-npszm
+ local pod=pxc-client-857d976497-npszm
+ local max_retry=480
+ local ns=
++ echo pxc-client-857d976497-npszm
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-857d976497-npszm condition met
waiting for pod/pxc-client-857d976497-npszm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.OGJBrKbY7x/select-1.sql ']'
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-1.sql /tmp/tmp.OGJBrKbY7x/select-1.sql
+ for i in '$(seq 0 $((size - 1)))'
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''^>]84CpfZXqvn(d?^K'\'' -P3306'
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''^>]84CpfZXqvn(d?^K'\'' -P3306'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-1-57.sql ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''^>]84CpfZXqvn(d?^K'\'' -P3306'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''^>]84CpfZXqvn(d?^K'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.knSpOluGdx
+++ mktemp
++ local LAST_ERR=/tmp/tmp.AiScyETt8V
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.knSpOluGdx
++ cat /tmp/tmp.AiScyETt8V
++ rm /tmp/tmp.knSpOluGdx /tmp/tmp.AiScyETt8V
++ return 0
+ client_pod=pxc-client-857d976497-npszm
+ wait_pod pxc-client-857d976497-npszm
+ local pod=pxc-client-857d976497-npszm
+ local max_retry=480
+ local ns=
++ echo pxc-client-857d976497-npszm
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-857d976497-npszm condition met
waiting for pod/pxc-client-857d976497-npszm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.OGJBrKbY7x/select-1.sql ']'
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-1.sql /tmp/tmp.OGJBrKbY7x/select-1.sql
+ for i in '$(seq 0 $((size - 1)))'
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''^>]84CpfZXqvn(d?^K'\'' -P3306'
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''^>]84CpfZXqvn(d?^K'\'' -P3306'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-1-57.sql ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''^>]84CpfZXqvn(d?^K'\'' -P3306'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''^>]84CpfZXqvn(d?^K'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.EYD3qyJ6im
+++ mktemp
++ local LAST_ERR=/tmp/tmp.3lA2bpUEF2
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.EYD3qyJ6im
++ cat /tmp/tmp.3lA2bpUEF2
++ rm /tmp/tmp.EYD3qyJ6im /tmp/tmp.3lA2bpUEF2
++ return 0
+ client_pod=pxc-client-857d976497-npszm
+ wait_pod pxc-client-857d976497-npszm
+ local pod=pxc-client-857d976497-npszm
+ local max_retry=480
+ local ns=
++ echo pxc-client-857d976497-npszm
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-857d976497-npszm condition met
waiting for pod/pxc-client-857d976497-npszm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.OGJBrKbY7x/select-1.sql ']'
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-1.sql /tmp/tmp.OGJBrKbY7x/select-1.sql
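Each select-1 block above is one compare_mysql_cmd round: run the query through the pxc-client pod, save the result under a temp directory, and diff it against a checked-in fixture, preferring a version-specific fixture (select-1-57.sql for 5.7) when one exists. A rough reconstruction from the trace; test_dir and tmp_dir are stand-ins for variables the trace does not show, and run_mysql is the helper that execs mysql inside the client pod:

    compare_mysql_cmd() {
        local command_id=$1 command=$2 uri=$3
        local expected_result=$test_dir/compare/$command_id.sql
        # prefer a version-specific fixture, e.g. select-1-57.sql for PXC 5.7
        if [[ -f $test_dir/compare/$command_id-57.sql ]]; then
            expected_result=$test_dir/compare/$command_id-57.sql
        fi
        run_mysql "$command" "$uri" >"$tmp_dir/$command_id.sql"
        # a non-empty diff fails the test; an empty diff is silent, as seen above
        diff -u "$expected_result" "$tmp_dir/$command_id.sql"
    }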
++ is_keyring_plugin_in_use some-name
++ local cluster=some-name
++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf'
++ grep -E -o 'early-plugin-load=keyring_\w+.so'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.691ykcS5c8
+++ mktemp
++ local LAST_ERR=/tmp/tmp.DlypI7OB2I
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.691ykcS5c8
++ cat /tmp/tmp.DlypI7OB2I
Unable to use a TTY - input is not a terminal or the right kind of file
++ rm /tmp/tmp.691ykcS5c8 /tmp/tmp.DlypI7OB2I
++ return 0
+ '[' '' ']'
+ desc 'test missing passwords were created and present in internal secrets'
+ set +o xtrace
-----------------------------------------------------------------------------------
test missing passwords were created and present in internal secrets
-----------------------------------------------------------------------------------
+ empty_pwds=()
+ wrong_pwds=()
+ for user in root xtrabackup monitor proxyadmin operator replication
+ echo 'Checking root'
Checking root
++ getSecretData my-cluster-secrets root
++ local secretName=my-cluster-secrets
++ local dataKey=root
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.7KgqzMFPuT
+++ mktemp
++ local LAST_ERR=/tmp/tmp.BmlUM8FbDY
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.7KgqzMFPuT
++ cat /tmp/tmp.BmlUM8FbDY
++ rm /tmp/tmp.7KgqzMFPuT /tmp/tmp.BmlUM8FbDY
++ return 0
+ secret_pass='^>]84CpfZXqvn(d?^K'
++ getSecretData internal-some-name root
++ local secretName=internal-some-name
++ local dataKey=root
++ kubectl_bin get secrets/internal-some-name '--template={{.data.root}}'
+++ mktemp
++ base64 --decode
++ local LAST_OUT=/tmp/tmp.j1bCuQrXBl
+++ mktemp
++ local LAST_ERR=/tmp/tmp.n6RLX4gY0h
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/internal-some-name '--template={{.data.root}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.j1bCuQrXBl
++ cat /tmp/tmp.n6RLX4gY0h
++ rm /tmp/tmp.j1bCuQrXBl /tmp/tmp.n6RLX4gY0h
++ return 0
+ int_secret_pass='^>]84CpfZXqvn(d?^K'
+ [[ -z ^>]84CpfZXqvn(d?^K ]]
+ [[ ^>]84CpfZXqvn(d?^K != \^\>\]\8\4\C\p\f\Z\X\q\v\n\(\d\?\^\K ]]
+ [[ root != \p\r\o\x\y\a\d\m\i\n ]]
+ [[ '' =~ root ]]
+ [[ '' =~ root ]]
+ echo 'Running compare for root'
Running compare for root
+ compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''^>]84CpfZXqvn(d?^K'\'''
+ local command_id=select-4
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -uroot -p'\''^>]84CpfZXqvn(d?^K'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-57.sql ]]
+ run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''^>]84CpfZXqvn(d?^K'\'''
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -uroot -p'\''^>]84CpfZXqvn(d?^K'\'''
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.RUuRQ1hAgu
+++ mktemp
++ local LAST_ERR=/tmp/tmp.dJqOilpyPx
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.RUuRQ1hAgu
++ cat /tmp/tmp.dJqOilpyPx
++ rm /tmp/tmp.RUuRQ1hAgu /tmp/tmp.dJqOilpyPx
++ return 0
+ client_pod=pxc-client-857d976497-npszm
+ wait_pod pxc-client-857d976497-npszm
+ local pod=pxc-client-857d976497-npszm
+ local max_retry=480
+ local ns=
++ echo pxc-client-857d976497-npszm
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-857d976497-npszm condition met
waiting for pod/pxc-client-857d976497-npszm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.OGJBrKbY7x/select-4.sql ']'
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql /tmp/tmp.OGJBrKbY7x/select-4.sql
+ for user in root xtrabackup monitor proxyadmin operator replication
+ echo 'Checking xtrabackup'
Checking xtrabackup
++ getSecretData my-cluster-secrets xtrabackup
++ local secretName=my-cluster-secrets
++ local dataKey=xtrabackup
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.EZ98uwykxi
+++ mktemp
++ local LAST_ERR=/tmp/tmp.z9BRtRHcGI
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.EZ98uwykxi
++ cat /tmp/tmp.z9BRtRHcGI
++ rm /tmp/tmp.EZ98uwykxi /tmp/tmp.z9BRtRHcGI
++ return 0
+ secret_pass='%Y&xM%xl@d7b8M0NSO'
++ getSecretData internal-some-name xtrabackup
++ local secretName=internal-some-name
++ local dataKey=xtrabackup
++ kubectl_bin get secrets/internal-some-name '--template={{.data.xtrabackup}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.XtUDnZxXmg
+++ mktemp
++ local LAST_ERR=/tmp/tmp.qYKYSzkAJc
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/internal-some-name '--template={{.data.xtrabackup}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.XtUDnZxXmg
++ cat /tmp/tmp.qYKYSzkAJc
++ rm /tmp/tmp.XtUDnZxXmg /tmp/tmp.qYKYSzkAJc
++ return 0
+ int_secret_pass='%Y&xM%xl@d7b8M0NSO'
+ [[ -z %Y&xM%xl@d7b8M0NSO ]]
+ [[ %Y&xM%xl@d7b8M0NSO != \%\Y\&\x\M\%\x\l\@\d\7\b\8\M\0\N\S\O ]]
+ [[ xtrabackup != \p\r\o\x\y\a\d\m\i\n ]]
+ [[ '' =~ xtrabackup ]]
+ [[ '' =~ xtrabackup ]]
+ echo 'Running compare for xtrabackup'
Running compare for xtrabackup
+ compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''%Y&xM%xl@d7b8M0NSO'\'''
+ local command_id=select-4
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -uxtrabackup -p'\''%Y&xM%xl@d7b8M0NSO'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-57.sql ]]
+ run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''%Y&xM%xl@d7b8M0NSO'\'''
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -uxtrabackup -p'\''%Y&xM%xl@d7b8M0NSO'\'''
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.xxFR6rsK4s
+++ mktemp
++ local LAST_ERR=/tmp/tmp.8qmlGubHkd
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.xxFR6rsK4s
++ cat /tmp/tmp.8qmlGubHkd
++ rm /tmp/tmp.xxFR6rsK4s /tmp/tmp.8qmlGubHkd
++ return 0
+ client_pod=pxc-client-857d976497-npszm
+ wait_pod pxc-client-857d976497-npszm
+ local pod=pxc-client-857d976497-npszm
+ local max_retry=480
+ local ns=
++ echo pxc-client-857d976497-npszm
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-857d976497-npszm condition met
waiting for pod/pxc-client-857d976497-npszm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.OGJBrKbY7x/select-4.sql ']'
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql /tmp/tmp.OGJBrKbY7x/select-4.sql
+ for user in root xtrabackup monitor proxyadmin operator replication
+ echo 'Checking monitor'
Checking monitor
++ getSecretData my-cluster-secrets monitor
++ local secretName=my-cluster-secrets
++ local dataKey=monitor
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.monitor}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.MtVkjcM3bK
+++ mktemp
++ local LAST_ERR=/tmp/tmp.6Jm4V2JXq2
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.monitor}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.MtVkjcM3bK
++ cat /tmp/tmp.6Jm4V2JXq2
++ rm /tmp/tmp.MtVkjcM3bK /tmp/tmp.6Jm4V2JXq2
++ return 0
+ secret_pass=monitor_password
++ getSecretData internal-some-name monitor
++ local secretName=internal-some-name
++ local dataKey=monitor
++ kubectl_bin get secrets/internal-some-name '--template={{.data.monitor}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.cOUnCg3tWK
+++ mktemp
++ local LAST_ERR=/tmp/tmp.sOUo7Uje5T
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/internal-some-name '--template={{.data.monitor}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.cOUnCg3tWK
++ cat /tmp/tmp.sOUo7Uje5T
++ rm /tmp/tmp.cOUnCg3tWK /tmp/tmp.sOUo7Uje5T
++ return 0
+ int_secret_pass=monitor_password
+ [[ -z monitor_password ]]
+ [[ monitor_password != \m\o\n\i\t\o\r\_\p\a\s\s\w\o\r\d ]]
+ [[ monitor != \p\r\o\x\y\a\d\m\i\n ]]
+ [[ '' =~ monitor ]]
+ [[ '' =~ monitor ]]
+ echo 'Running compare for monitor'
Running compare for monitor
+ compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\'''
+ local command_id=select-4
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-57.sql ]]
+ run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\'''
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\'''
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.KbIbElVoum
+++ mktemp
++ local LAST_ERR=/tmp/tmp.eBFzJWFfYq
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.KbIbElVoum
++ cat /tmp/tmp.eBFzJWFfYq
++ rm /tmp/tmp.KbIbElVoum /tmp/tmp.eBFzJWFfYq
++ return 0
+ client_pod=pxc-client-857d976497-npszm
+ wait_pod pxc-client-857d976497-npszm
+ local pod=pxc-client-857d976497-npszm
+ local max_retry=480
+ local ns=
++ echo pxc-client-857d976497-npszm
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-857d976497-npszm condition met
waiting for pod/pxc-client-857d976497-npszm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.OGJBrKbY7x/select-4.sql ']'
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql /tmp/tmp.OGJBrKbY7x/select-4.sql
+ for user in root xtrabackup monitor proxyadmin operator replication
+ echo 'Checking proxyadmin'
Checking proxyadmin
++ getSecretData my-cluster-secrets proxyadmin
++ local secretName=my-cluster-secrets
++ local dataKey=proxyadmin
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.1o0uEYjamL
+++ mktemp
++ local LAST_ERR=/tmp/tmp.p8bzofz3So
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.1o0uEYjamL
++ cat /tmp/tmp.p8bzofz3So
++ rm /tmp/tmp.1o0uEYjamL /tmp/tmp.p8bzofz3So
++ return 0
+ secret_pass='uKeV0{HsZuK-KRPvCg'
++ getSecretData internal-some-name proxyadmin
++ local secretName=internal-some-name
++ local dataKey=proxyadmin
++ kubectl_bin get secrets/internal-some-name '--template={{.data.proxyadmin}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.HVsNjmAIJO
+++ mktemp
++ local LAST_ERR=/tmp/tmp.CspPWly7Ql
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/internal-some-name '--template={{.data.proxyadmin}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.HVsNjmAIJO
++ cat /tmp/tmp.CspPWly7Ql
++ rm /tmp/tmp.HVsNjmAIJO /tmp/tmp.CspPWly7Ql
++ return 0
+ int_secret_pass='uKeV0{HsZuK-KRPvCg'
+ [[ -z uKeV0{HsZuK-KRPvCg ]]
+ [[ uKeV0{HsZuK-KRPvCg != \u\K\e\V\0\{\H\s\Z\u\K\-\K\R\P\v\C\g ]]
+ [[ proxyadmin != \p\r\o\x\y\a\d\m\i\n ]]
+ [[ proxyadmin == \p\r\o\x\y\a\d\m\i\n ]]
+ [[ '' =~ proxyadmin ]]
+ [[ '' =~ proxyadmin ]]
+ echo 'Running compare for proxyadmin'
Running compare for proxyadmin
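The per-user loop running here relies on getSecretData to pull each password twice, once from the user-facing my-cluster-secrets and once from the operator-managed internal-some-name secret, then flags users whose internal entry is empty or diverges. A sketch of that check as it appears in the trace (the empty_pwds/wrong_pwds bookkeeping is inferred from the array initializations above and the final [[ -n ... ]] checks later in the log):

    getSecretData() {
        local secretName=$1 dataKey=$2
        kubectl get "secrets/$secretName" "--template={{.data.$dataKey}}" | base64 --decode
    }

    secret_pass=$(getSecretData my-cluster-secrets "$user")
    int_secret_pass=$(getSecretData internal-some-name "$user")
    [[ -z $int_secret_pass ]] && empty_pwds+=("$user")                 # password missing internally
    [[ $int_secret_pass != "$secret_pass" ]] && wrong_pwds+=("$user")  # internal copy diverged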
+ compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''uKeV0{HsZuK-KRPvCg'\''' some-name-proxysql-0 '' proxysql
+ local command_id=select-2
+ local 'command=SHOW TABLES;'
+ local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''uKeV0{HsZuK-KRPvCg'\'''
+ local pod=some-name-proxysql-0
+ local postfix=
+ local container_name=proxysql
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-2.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]]
+ run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''uKeV0{HsZuK-KRPvCg'\''' some-name-proxysql-0 proxysql
+ local 'command=SHOW TABLES;'
+ local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''uKeV0{HsZuK-KRPvCg'\'''
+ local pod=some-name-proxysql-0
+ local container_name=proxysql
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.OGJBrKbY7x/select-2.sql ']'
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-2.sql /tmp/tmp.OGJBrKbY7x/select-2.sql
+ for user in root xtrabackup monitor proxyadmin operator replication
+ echo 'Checking operator'
Checking operator
++ getSecretData my-cluster-secrets operator
++ local secretName=my-cluster-secrets
++ local dataKey=operator
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.operator}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.HFnTlVYJbU
+++ mktemp
++ local LAST_ERR=/tmp/tmp.1FlAgLHwvh
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.operator}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.HFnTlVYJbU
++ cat /tmp/tmp.1FlAgLHwvh
++ rm /tmp/tmp.HFnTlVYJbU /tmp/tmp.1FlAgLHwvh
++ return 0
+ secret_pass='Ha^D1.5AST+1C^OE'
++ getSecretData internal-some-name operator
++ local secretName=internal-some-name
++ local dataKey=operator
++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.S6X5Rc1k06
+++ mktemp
++ local LAST_ERR=/tmp/tmp.HzI4xGjtdz
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/internal-some-name '--template={{.data.operator}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.S6X5Rc1k06
++ cat /tmp/tmp.HzI4xGjtdz
++ rm /tmp/tmp.S6X5Rc1k06 /tmp/tmp.HzI4xGjtdz
++ return 0
+ int_secret_pass='Ha^D1.5AST+1C^OE'
+ [[ -z Ha^D1.5AST+1C^OE ]]
+ [[ Ha^D1.5AST+1C^OE != \H\a\^\D\1\.\5\A\S\T\+\1\C\^\O\E ]]
+ [[ operator != \p\r\o\x\y\a\d\m\i\n ]]
+ [[ '' =~ operator ]]
+ [[ '' =~ operator ]]
+ echo 'Running compare for operator'
Running compare for operator
+ compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''Ha^D1.5AST+1C^OE'\'''
+ local command_id=select-4
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -uoperator -p'\''Ha^D1.5AST+1C^OE'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-57.sql ]]
+ run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''Ha^D1.5AST+1C^OE'\'''
-uoperator -p'\''Ha^D1.5AST+1C^OE'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''Ha^D1.5AST+1C^OE'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9pKEZ4ksZ7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.PaPh2F4Tjj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9pKEZ4ksZ7 ++ cat /tmp/tmp.PaPh2F4Tjj ++ rm /tmp/tmp.9pKEZ4ksZ7 /tmp/tmp.PaPh2F4Tjj ++ return 0 + client_pod=pxc-client-857d976497-npszm + wait_pod pxc-client-857d976497-npszm + local pod=pxc-client-857d976497-npszm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-npszm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-npszm condition met waiting for pod/pxc-client-857d976497-npszm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.OGJBrKbY7x/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql /tmp/tmp.OGJBrKbY7x/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking replication' Checking replication ++ getSecretData my-cluster-secrets replication ++ local secretName=my-cluster-secrets ++ local dataKey=replication ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.gHNLZfIsdF +++ mktemp ++ local LAST_ERR=/tmp/tmp.91FMqtUNfH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gHNLZfIsdF ++ cat /tmp/tmp.91FMqtUNfH ++ rm /tmp/tmp.gHNLZfIsdF /tmp/tmp.91FMqtUNfH ++ return 0 + secret_pass=')1#)(zom2g[a@V+52pl' ++ getSecretData internal-some-name replication ++ local secretName=internal-some-name ++ local dataKey=replication ++ kubectl_bin get secrets/internal-some-name '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.U9PVOCDROh +++ mktemp ++ local LAST_ERR=/tmp/tmp.tvsW365lAu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.U9PVOCDROh ++ cat /tmp/tmp.tvsW365lAu ++ rm /tmp/tmp.U9PVOCDROh /tmp/tmp.tvsW365lAu ++ return 0 + int_secret_pass=')1#)(zom2g[a@V+52pl' + [[ -z )1#)(zom2g[a@V+52pl ]] + [[ )1#)(zom2g[a@V+52pl != \)\1\#\)\(\z\o\m\2\g\[\a\@\V\+\5\2\p\l ]] + [[ replication != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ replication ]] + [[ '' =~ replication ]] + echo 'Running compare for replication' Running compare for replication + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\'')1#)(zom2g[a@V+52pl'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\'')1#)(zom2g[a@V+52pl'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\'')1#)(zom2g[a@V+52pl'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\'')1#)(zom2g[a@V+52pl'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vGrGfqMdhB +++ mktemp ++ local LAST_ERR=/tmp/tmp.SKqfRD9c6R ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vGrGfqMdhB ++ cat /tmp/tmp.SKqfRD9c6R ++ rm /tmp/tmp.vGrGfqMdhB /tmp/tmp.SKqfRD9c6R ++ return 0 + client_pod=pxc-client-857d976497-npszm + wait_pod pxc-client-857d976497-npszm + local pod=pxc-client-857d976497-npszm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-npszm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-npszm condition met waiting for pod/pxc-client-857d976497-npszm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.OGJBrKbY7x/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql /tmp/tmp.OGJBrKbY7x/select-4.sql + [[ -n '' ]] + [[ -n '' ]] + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.tPQ6Fy1FfQ ++ mktemp + local LAST_ERR=/tmp/tmp.Mo05jdmZ88 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.tPQ6Fy1FfQ secret/my-cluster-secrets patched + cat /tmp/tmp.Mo05jdmZ88 + rm /tmp/tmp.tPQ6Fy1FfQ /tmp/tmp.Mo05jdmZ88 + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ 
mktemp ++ local LAST_OUT=/tmp/tmp.mts7V68Uuq +++ mktemp ++ local LAST_ERR=/tmp/tmp.54euZ5axby ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mts7V68Uuq ++ cat /tmp/tmp.54euZ5axby ++ rm /tmp/tmp.mts7V68Uuq /tmp/tmp.54euZ5axby ++ return 0 + client_pod=pxc-client-857d976497-npszm + wait_pod pxc-client-857d976497-npszm + local pod=pxc-client-857d976497-npszm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-npszm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-npszm condition met waiting for pod/pxc-client-857d976497-npszm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.OGJBrKbY7x/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql /tmp/tmp.OGJBrKbY7x/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.rL29K7hvV6 ++ mktemp + local LAST_ERR=/tmp/tmp.BDLgfMTxGS + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rL29K7hvV6 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.BDLgfMTxGS + rm /tmp/tmp.rL29K7hvV6 /tmp/tmp.BDLgfMTxGS + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.erEVd91xSC +++ mktemp ++ local LAST_ERR=/tmp/tmp.j3XTpTIoNH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.erEVd91xSC ++ cat /tmp/tmp.j3XTpTIoNH ++ rm /tmp/tmp.erEVd91xSC /tmp/tmp.j3XTpTIoNH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
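
Each "test <user>" section in this log boils down to the patch_secret helper traced above: base64-encode the new password and merge it into the Secret's data map (dGVzdC1wYXNzd29yZA== decodes to test-password), then give the operator a fixed head start before comparing. A sketch under those assumptions:

# patch_secret as exercised in the trace; the value must already be base64.
patch_secret() {
    local secret=$1 key=$2 value=$3
    kubectl patch secret "${secret}" -p="{\"data\":{\"${key}\": \"${value}\"}}"
}

patch_secret my-cluster-secrets root "$(echo -n 'test-password' | base64)"
sleep 15   # matches the fixed delay the test uses before the next compare
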
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TCu5G9pJen +++ mktemp ++ local LAST_ERR=/tmp/tmp.1CZrWZgRAK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TCu5G9pJen ++ cat /tmp/tmp.1CZrWZgRAK ++ rm /tmp/tmp.TCu5G9pJen /tmp/tmp.1CZrWZgRAK ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.REYpQI3qZF +++ mktemp ++ local LAST_ERR=/tmp/tmp.5Q0fyQY7MJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.REYpQI3qZF ++ cat /tmp/tmp.5Q0fyQY7MJ ++ rm /tmp/tmp.REYpQI3qZF /tmp/tmp.5Q0fyQY7MJ ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.aUOgdjHeyX ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.wYE5a1092P +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.aUOgdjHeyX +++++ cat /tmp/tmp.wYE5a1092P +++++ rm /tmp/tmp.aUOgdjHeyX /tmp/tmp.wYE5a1092P +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.HiC0vYUhFH ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.hteu4tTYzX +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.HiC0vYUhFH +++++ cat /tmp/tmp.hteu4tTYzX +++++ rm /tmp/tmp.HiC0vYUhFH /tmp/tmp.hteu4tTYzX +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XZcrOMIyW3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.3i3XQFqJXR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XZcrOMIyW3 ++ cat /tmp/tmp.3i3XQFqJXR ++ rm /tmp/tmp.XZcrOMIyW3 /tmp/tmp.3i3XQFqJXR ++ return 0 + [[ 3 == \3 ]] + echo + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.8DL2r4RgVB ++ mktemp + local LAST_ERR=/tmp/tmp.yvA3ErCSkx + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8DL2r4RgVB secret/my-cluster-secrets patched + cat /tmp/tmp.yvA3ErCSkx + rm /tmp/tmp.8DL2r4RgVB /tmp/tmp.yvA3ErCSkx + return 0 + 
sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7VuSjGiO0H +++ mktemp ++ local LAST_ERR=/tmp/tmp.iBTIzWMcgS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7VuSjGiO0H ++ cat /tmp/tmp.iBTIzWMcgS ++ rm /tmp/tmp.7VuSjGiO0H /tmp/tmp.iBTIzWMcgS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZEfq5zGtTE +++ mktemp ++ local LAST_ERR=/tmp/tmp.arwVrZC1lB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZEfq5zGtTE ++ cat /tmp/tmp.arwVrZC1lB ++ rm /tmp/tmp.ZEfq5zGtTE /tmp/tmp.arwVrZC1lB ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.onqbS6mILO +++ mktemp ++ local LAST_ERR=/tmp/tmp.xg5Y9wF9mG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.onqbS6mILO ++ cat /tmp/tmp.xg5Y9wF9mG ++ rm /tmp/tmp.onqbS6mILO /tmp/tmp.xg5Y9wF9mG ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.3U9l9NV55z ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.6WMNzlwTjz +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.3U9l9NV55z +++++ cat /tmp/tmp.6WMNzlwTjz +++++ rm /tmp/tmp.3U9l9NV55z /tmp/tmp.6WMNzlwTjz +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.5ynuffsUPo ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.9myNY496iv +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.5ynuffsUPo +++++ cat /tmp/tmp.9myNY496iv +++++ rm /tmp/tmp.5ynuffsUPo /tmp/tmp.9myNY496iv +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cWLuMChEAa +++ mktemp ++ local LAST_ERR=/tmp/tmp.gfMaM3xhbe ++ local exit_status=0 
+++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cWLuMChEAa ++ cat /tmp/tmp.gfMaM3xhbe ++ rm /tmp/tmp.cWLuMChEAa /tmp/tmp.gfMaM3xhbe ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.OGJBrKbY7x/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-2.sql /tmp/tmp.OGJBrKbY7x/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.OGJBrKbY7x/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-2.sql /tmp/tmp.OGJBrKbY7x/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.OGJBrKbY7x/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-2.sql /tmp/tmp.OGJBrKbY7x/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.6O07qqjFTV ++ mktemp + local LAST_ERR=/tmp/tmp.rP6S86boLH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6O07qqjFTV perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.rP6S86boLH + rm /tmp/tmp.6O07qqjFTV /tmp/tmp.rP6S86boLH + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.78wqzwmNlV ++ mktemp + local LAST_ERR=/tmp/tmp.YIN7b3mxs5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.78wqzwmNlV secret/my-cluster-secrets patched + cat /tmp/tmp.YIN7b3mxs5 + rm /tmp/tmp.78wqzwmNlV /tmp/tmp.YIN7b3mxs5 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OQDskVJNCB +++ mktemp ++ local LAST_ERR=/tmp/tmp.wKnUPNKtjX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OQDskVJNCB ++ cat /tmp/tmp.wKnUPNKtjX ++ rm /tmp/tmp.OQDskVJNCB /tmp/tmp.wKnUPNKtjX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Q52ZF0JLL4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.gGZpzbRHHH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Q52ZF0JLL4 ++ cat /tmp/tmp.gGZpzbRHHH ++ rm /tmp/tmp.Q52ZF0JLL4 /tmp/tmp.gGZpzbRHHH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
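
The three compare_mysql_cmd_local calls just above verify the rotated proxyadmin password against every ProxySQL replica on its admin port (6032). The actual query is hidden behind set +o xtrace, so the mysql invocation below is an assumption; the shape of the per-pod loop and the diff against the expected file are not:

# Per-replica check of the new proxyadmin password (exec form assumed).
for pod in some-name-proxysql-0 some-name-proxysql-1 some-name-proxysql-2; do
    kubectl exec "${pod}" -c proxysql -- \
        mysql -h127.0.0.1 -P6032 -uproxyadmin -p'test-password' -e 'SHOW TABLES;' \
        >/tmp/select-2.sql.actual
    diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-2.sql \
        /tmp/select-2.sql.actual
done
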
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Nv3YFmkryx +++ mktemp ++ local LAST_ERR=/tmp/tmp.o2mpMst0dR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Nv3YFmkryx ++ cat /tmp/tmp.o2mpMst0dR ++ rm /tmp/tmp.Nv3YFmkryx /tmp/tmp.o2mpMst0dR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PAHH4ruejU +++ mktemp ++ local LAST_ERR=/tmp/tmp.6xWP4kdS4p ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PAHH4ruejU ++ cat /tmp/tmp.6xWP4kdS4p ++ rm /tmp/tmp.PAHH4ruejU /tmp/tmp.6xWP4kdS4p ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.s3azu5Aztw +++ mktemp ++ local LAST_ERR=/tmp/tmp.awLIpBgl93 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.s3azu5Aztw ++ cat /tmp/tmp.awLIpBgl93 ++ rm /tmp/tmp.s3azu5Aztw /tmp/tmp.awLIpBgl93 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TxKfTJzN7m +++ mktemp ++ local LAST_ERR=/tmp/tmp.PmlF9Pm0do ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TxKfTJzN7m ++ cat /tmp/tmp.PmlF9Pm0do ++ rm /tmp/tmp.TxKfTJzN7m /tmp/tmp.PmlF9Pm0do ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Bfg5MWWLFH +++ mktemp ++ local LAST_ERR=/tmp/tmp.npturrLH8Q ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Bfg5MWWLFH ++ cat /tmp/tmp.npturrLH8Q ++ rm /tmp/tmp.Bfg5MWWLFH /tmp/tmp.npturrLH8Q ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yYYyJfxobX +++ mktemp ++ local LAST_ERR=/tmp/tmp.lcqxGnEPPA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yYYyJfxobX ++ cat /tmp/tmp.lcqxGnEPPA ++ rm /tmp/tmp.yYYyJfxobX /tmp/tmp.lcqxGnEPPA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
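
The dotted output running through this stretch is wait_cluster_consistency polling: read .status.state every five seconds and give up after 300 attempts. A simplified sketch of the loop emitting these dots (the real helper goes on to verify the pxc and proxysql ready counts once the state is ready):

# Poll the custom resource until the operator reports ready.
i=0
max=300
until [[ "$(kubectl get pxc some-name -o 'jsonpath={.status.state}')" == "ready" ]]; do
    [[ ${i} -ge ${max} ]] && { echo "pxc/some-name never became ready" >&2; exit 1; }
    echo -n .
    sleep 5
    let i+=1
done
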
.+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lufRsFp5L3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.IsxVviEIEa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lufRsFp5L3 ++ cat /tmp/tmp.IsxVviEIEa ++ rm /tmp/tmp.lufRsFp5L3 /tmp/tmp.IsxVviEIEa ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.i3BEsOgxcw +++ mktemp ++ local LAST_ERR=/tmp/tmp.ne7mtQdeub ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.i3BEsOgxcw ++ cat /tmp/tmp.ne7mtQdeub ++ rm /tmp/tmp.i3BEsOgxcw /tmp/tmp.ne7mtQdeub ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eJKBL6Exdm +++ mktemp ++ local LAST_ERR=/tmp/tmp.Jh3rtyo7kR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eJKBL6Exdm ++ cat /tmp/tmp.Jh3rtyo7kR ++ rm /tmp/tmp.eJKBL6Exdm /tmp/tmp.Jh3rtyo7kR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1ZsPJKWzKW +++ mktemp ++ local LAST_ERR=/tmp/tmp.eQu67GCnfu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1ZsPJKWzKW ++ cat /tmp/tmp.eQu67GCnfu ++ rm /tmp/tmp.1ZsPJKWzKW /tmp/tmp.eQu67GCnfu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JvkF4s80II +++ mktemp ++ local LAST_ERR=/tmp/tmp.YqnQmsszBN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JvkF4s80II ++ cat /tmp/tmp.YqnQmsszBN ++ rm /tmp/tmp.JvkF4s80II /tmp/tmp.YqnQmsszBN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LCvqxPOnu4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.b83vNnR4rg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LCvqxPOnu4 ++ cat /tmp/tmp.b83vNnR4rg ++ rm /tmp/tmp.LCvqxPOnu4 /tmp/tmp.b83vNnR4rg ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zMD1tY2uII +++ mktemp ++ local LAST_ERR=/tmp/tmp.8dqtPSlmjT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zMD1tY2uII ++ cat /tmp/tmp.8dqtPSlmjT ++ rm /tmp/tmp.zMD1tY2uII /tmp/tmp.8dqtPSlmjT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wFYRsx187l +++ mktemp ++ local LAST_ERR=/tmp/tmp.rBPToUbPh5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wFYRsx187l ++ cat /tmp/tmp.rBPToUbPh5 ++ rm /tmp/tmp.wFYRsx187l /tmp/tmp.rBPToUbPh5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Tl76tRuzn9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.gb0XfotRnQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Tl76tRuzn9 ++ cat /tmp/tmp.gb0XfotRnQ ++ rm /tmp/tmp.Tl76tRuzn9 /tmp/tmp.gb0XfotRnQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4kTOI0kBfR +++ mktemp ++ local LAST_ERR=/tmp/tmp.Qifkvxj5DR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4kTOI0kBfR ++ cat /tmp/tmp.Qifkvxj5DR ++ rm /tmp/tmp.4kTOI0kBfR /tmp/tmp.Qifkvxj5DR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WV3n4ckFqh +++ mktemp ++ local LAST_ERR=/tmp/tmp.kPOudJLNqI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WV3n4ckFqh ++ cat /tmp/tmp.kPOudJLNqI ++ rm /tmp/tmp.WV3n4ckFqh /tmp/tmp.kPOudJLNqI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.P3Tf38NXc7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.a8b0ixfi1y ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.P3Tf38NXc7 ++ cat /tmp/tmp.a8b0ixfi1y ++ rm /tmp/tmp.P3Tf38NXc7 /tmp/tmp.a8b0ixfi1y ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uBDPk5fsns +++ mktemp ++ local LAST_ERR=/tmp/tmp.MlNbPFbKzj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uBDPk5fsns ++ cat /tmp/tmp.MlNbPFbKzj ++ rm /tmp/tmp.uBDPk5fsns /tmp/tmp.MlNbPFbKzj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3PIBlEl7gU +++ mktemp ++ local LAST_ERR=/tmp/tmp.dyxmdjFFUk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3PIBlEl7gU ++ cat /tmp/tmp.dyxmdjFFUk ++ rm /tmp/tmp.3PIBlEl7gU /tmp/tmp.dyxmdjFFUk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PHv8rbLCWc +++ mktemp ++ local LAST_ERR=/tmp/tmp.Xwv9FnCdFP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PHv8rbLCWc ++ cat /tmp/tmp.Xwv9FnCdFP ++ rm /tmp/tmp.PHv8rbLCWc /tmp/tmp.Xwv9FnCdFP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 22 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.st2QepTjrt +++ mktemp ++ local LAST_ERR=/tmp/tmp.AwFQPCXmBV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.st2QepTjrt ++ cat /tmp/tmp.AwFQPCXmBV ++ rm /tmp/tmp.st2QepTjrt /tmp/tmp.AwFQPCXmBV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
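
Once the state flips to ready (next iteration below), the helper still has to decide which proxy fronts the cluster before it can read a ready count: it probes spec.haproxy.enabled and spec.proxysql.enabled and takes whichever is true. Sketch, with the sizes from this xtrabackup step (3 PXC nodes, 2 ProxySQL):

# Resolve the active proxy, then compare ready counts to the spec.
if [[ "$(kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}')" == "true" ]]; then
    proxy=haproxy
elif [[ "$(kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}')" == "true" ]]; then
    proxy=proxysql
fi
pxc_ready=$(kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}')
proxy_ready=$(kubectl get pxc some-name -o "jsonpath={.status.${proxy}.ready}")
[[ "${pxc_ready}" == "3" && "${proxy_ready}" == "2" ]] || echo "ready counts diverge" >&2
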
.+ sleep 5 + [[ 23 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.p6rj3gIRPK +++ mktemp ++ local LAST_ERR=/tmp/tmp.bJIMPKisCW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.p6rj3gIRPK ++ cat /tmp/tmp.bJIMPKisCW ++ rm /tmp/tmp.p6rj3gIRPK /tmp/tmp.bJIMPKisCW ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eMFhBXoG8B +++ mktemp ++ local LAST_ERR=/tmp/tmp.FoZjVPsXhq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eMFhBXoG8B ++ cat /tmp/tmp.FoZjVPsXhq ++ rm /tmp/tmp.eMFhBXoG8B /tmp/tmp.FoZjVPsXhq ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.cDrlWxv38W ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.S4mMlDBmmA +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.cDrlWxv38W +++++ cat /tmp/tmp.S4mMlDBmmA +++++ rm /tmp/tmp.cDrlWxv38W /tmp/tmp.S4mMlDBmmA +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.vO0M9yWmOK ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.qUL4QwNr3B +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.vO0M9yWmOK +++++ cat /tmp/tmp.qUL4QwNr3B +++++ rm /tmp/tmp.vO0M9yWmOK /tmp/tmp.qUL4QwNr3B +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eQMYptOUWr +++ mktemp ++ local LAST_ERR=/tmp/tmp.dYp7Qkksqo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eQMYptOUWr ++ cat /tmp/tmp.dYp7Qkksqo ++ rm /tmp/tmp.eQMYptOUWr /tmp/tmp.dYp7Qkksqo ++ return 0 + [[ 2 == \2 ]] + echo + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 
'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' -s /tmp/tmp.OGJBrKbY7x/select-3.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-3.sql /tmp/tmp.OGJBrKbY7x/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.F3djXxrsra ++ mktemp + local LAST_ERR=/tmp/tmp.1T2zMV06ub + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.F3djXxrsra secret/my-cluster-secrets patched + cat /tmp/tmp.1T2zMV06ub + rm /tmp/tmp.F3djXxrsra /tmp/tmp.1T2zMV06ub + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.AQonMemUSg +++ mktemp ++ local LAST_ERR=/tmp/tmp.mxk5wGdc1i ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AQonMemUSg ++ cat /tmp/tmp.mxk5wGdc1i ++ rm /tmp/tmp.AQonMemUSg /tmp/tmp.mxk5wGdc1i ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + echo 'Skipping dual password feature doesn'\''t work for 5.7. PXC 5.7 doesn'\''t support it!' Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it! + return + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.B6DecMFjUN +++ mktemp ++ local LAST_ERR=/tmp/tmp.p4DXjswbb8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.B6DecMFjUN ++ cat /tmp/tmp.p4DXjswbb8 ++ rm /tmp/tmp.B6DecMFjUN /tmp/tmp.p4DXjswbb8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
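
wait_for_password_propagation returns immediately above because dual passwords (the server honoring the old and new password at once during rotation) are a MySQL 8.0 feature, and this run uses a 5.7 image. The gate is just a regex on the image tag; IMAGE_PXC below stands in for whatever variable the script actually matches (perconalab/percona-xtradb-cluster-operator:main-pxc5.7 in this run):

# Skip the dual-password wait on 5.7 images.
if [[ "${IMAGE_PXC}" =~ 5\.7 ]]; then
    echo "Skipping: PXC 5.7 does not support dual passwords"
    return
fi
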
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.t6n5qFgoyf +++ mktemp ++ local LAST_ERR=/tmp/tmp.RyA2LDZyqe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.t6n5qFgoyf ++ cat /tmp/tmp.RyA2LDZyqe ++ rm /tmp/tmp.t6n5qFgoyf /tmp/tmp.RyA2LDZyqe ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ojEqdHTEdj +++ mktemp ++ local LAST_ERR=/tmp/tmp.sFussyAhwj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ojEqdHTEdj ++ cat /tmp/tmp.sFussyAhwj ++ rm /tmp/tmp.ojEqdHTEdj /tmp/tmp.sFussyAhwj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vKOlg6ro02 +++ mktemp ++ local LAST_ERR=/tmp/tmp.50QCP9WFNr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vKOlg6ro02 ++ cat /tmp/tmp.50QCP9WFNr ++ rm /tmp/tmp.vKOlg6ro02 /tmp/tmp.50QCP9WFNr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9gTdS66bxi +++ mktemp ++ local LAST_ERR=/tmp/tmp.6eTK2aC7gV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9gTdS66bxi ++ cat /tmp/tmp.6eTK2aC7gV ++ rm /tmp/tmp.9gTdS66bxi /tmp/tmp.6eTK2aC7gV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ltknROVLYX +++ mktemp ++ local LAST_ERR=/tmp/tmp.w12vMXrkgK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ltknROVLYX ++ cat /tmp/tmp.w12vMXrkgK ++ rm /tmp/tmp.ltknROVLYX /tmp/tmp.w12vMXrkgK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
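
Every compare_mysql_cmd in this log repeats the same expected-file selection before diffing: start from select-N.sql and switch to a version-suffixed variant only when the image tag matches and such a file exists. Sketch (COMPARE_DIR shortens the Jenkins path; IMAGE_PXC is the same stand-in as above):

# Pick the expected result for the image under test, then diff.
COMPARE_DIR=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare
expected_result=${COMPARE_DIR}/select-4.sql
if [[ "${IMAGE_PXC}" =~ 5\.7 ]] && [[ -f ${COMPARE_DIR}/select-4-57.sql ]]; then
    expected_result=${COMPARE_DIR}/select-4-57.sql
fi
diff -u "${expected_result}" /tmp/tmp.OGJBrKbY7x/select-4.sql
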
.+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ujQ3qlTjFq +++ mktemp ++ local LAST_ERR=/tmp/tmp.kbxolpiIF2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ujQ3qlTjFq ++ cat /tmp/tmp.kbxolpiIF2 ++ rm /tmp/tmp.ujQ3qlTjFq /tmp/tmp.kbxolpiIF2 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qOy7XNeDLX +++ mktemp ++ local LAST_ERR=/tmp/tmp.G7piPfXH7Z ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qOy7XNeDLX ++ cat /tmp/tmp.G7piPfXH7Z ++ rm /tmp/tmp.qOy7XNeDLX /tmp/tmp.G7piPfXH7Z ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.or66RZSAt6 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.HzPPnK4DFg +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.or66RZSAt6 +++++ cat /tmp/tmp.HzPPnK4DFg +++++ rm /tmp/tmp.or66RZSAt6 /tmp/tmp.HzPPnK4DFg +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.tuIa78eRBF ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.eBHS6pFXXK +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.tuIa78eRBF +++++ cat /tmp/tmp.eBHS6pFXXK +++++ rm /tmp/tmp.tuIa78eRBF /tmp/tmp.eBHS6pFXXK +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VRmIINxv3g +++ mktemp ++ local LAST_ERR=/tmp/tmp.ctyBNJUapc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VRmIINxv3g ++ cat /tmp/tmp.ctyBNJUapc ++ rm /tmp/tmp.VRmIINxv3g /tmp/tmp.ctyBNJUapc ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW 
TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KvDuPZqQnK +++ mktemp ++ local LAST_ERR=/tmp/tmp.RIpF2G70IQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KvDuPZqQnK ++ cat /tmp/tmp.RIpF2G70IQ ++ rm /tmp/tmp.KvDuPZqQnK /tmp/tmp.RIpF2G70IQ ++ return 0 + client_pod=pxc-client-857d976497-npszm + wait_pod pxc-client-857d976497-npszm + local pod=pxc-client-857d976497-npszm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-npszm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-npszm condition met waiting for pod/pxc-client-857d976497-npszm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.OGJBrKbY7x/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql /tmp/tmp.OGJBrKbY7x/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.WyMC7YvN2p ++ mktemp + local LAST_ERR=/tmp/tmp.UGkbB6lzRe + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.WyMC7YvN2p secret/my-cluster-secrets patched + cat /tmp/tmp.UGkbB6lzRe + rm /tmp/tmp.WyMC7YvN2p /tmp/tmp.UGkbB6lzRe + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4LwadV498N +++ mktemp ++ local LAST_ERR=/tmp/tmp.iIwzTG64Ar ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4LwadV498N ++ cat /tmp/tmp.iIwzTG64Ar ++ rm /tmp/tmp.4LwadV498N /tmp/tmp.iIwzTG64Ar ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
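
run_mysql never dials the database from the test runner: it resolves the pxc-client deployment's pod by label, waits for it to be Ready (the "condition met" line above is kubectl wait output), and runs the mysql client inside it. The exec itself sits behind set +o xtrace, so its exact form below is assumed; the "Defaulted container" message in the log shows it does not pin a container:

# Resolve the client pod and run the statement from inside the cluster.
client_pod=$(kubectl get pods --selector=name=pxc-client \
    -o 'jsonpath={.items[].metadata.name}')
kubectl wait --for=condition=Ready "pod/${client_pod}"
kubectl exec "${client_pod}" -- \
    mysql -h some-name-proxysql -umonitor -p'test-password' -e 'SHOW TABLES;'
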
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ue6sPeE0q7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.oMFQMDdMPs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ue6sPeE0q7 ++ cat /tmp/tmp.oMFQMDdMPs ++ rm /tmp/tmp.ue6sPeE0q7 /tmp/tmp.oMFQMDdMPs ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pKb4RzOajp +++ mktemp ++ local LAST_ERR=/tmp/tmp.QqVKtSq8FK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pKb4RzOajp ++ cat /tmp/tmp.QqVKtSq8FK ++ rm /tmp/tmp.pKb4RzOajp /tmp/tmp.QqVKtSq8FK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pHpBN43Ijj +++ mktemp ++ local LAST_ERR=/tmp/tmp.MiVaZkbg59 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pHpBN43Ijj ++ cat /tmp/tmp.MiVaZkbg59 ++ rm /tmp/tmp.pHpBN43Ijj /tmp/tmp.MiVaZkbg59 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yTSiYPC3KQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.W8hq5foDts ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yTSiYPC3KQ ++ cat /tmp/tmp.W8hq5foDts ++ rm /tmp/tmp.yTSiYPC3KQ /tmp/tmp.W8hq5foDts ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.meHCKXMue3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.FoYt2r4YTG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.meHCKXMue3 ++ cat /tmp/tmp.FoYt2r4YTG ++ rm /tmp/tmp.meHCKXMue3 /tmp/tmp.FoYt2r4YTG ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.9Squ9RBda3 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.qLAaFjUrSa +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.9Squ9RBda3 +++++ cat /tmp/tmp.qLAaFjUrSa +++++ rm /tmp/tmp.9Squ9RBda3 /tmp/tmp.qLAaFjUrSa +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.x6wGkzErNH ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.xSi9DR0WH4 +++++ local exit_status=0 ++++++ seq 0 2 +++++ 
for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.x6wGkzErNH +++++ cat /tmp/tmp.xSi9DR0WH4 +++++ rm /tmp/tmp.x6wGkzErNH /tmp/tmp.xSi9DR0WH4 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xGlpqTaHJg +++ mktemp ++ local LAST_ERR=/tmp/tmp.EKrBj4NLVl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xGlpqTaHJg ++ cat /tmp/tmp.EKrBj4NLVl ++ rm /tmp/tmp.xGlpqTaHJg /tmp/tmp.EKrBj4NLVl ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3p0RG8VWjG +++ mktemp ++ local LAST_ERR=/tmp/tmp.H6686lMKkz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3p0RG8VWjG ++ cat /tmp/tmp.H6686lMKkz ++ rm /tmp/tmp.3p0RG8VWjG /tmp/tmp.H6686lMKkz ++ return 0 + client_pod=pxc-client-857d976497-npszm + wait_pod pxc-client-857d976497-npszm + local pod=pxc-client-857d976497-npszm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-npszm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-npszm condition met waiting for pod/pxc-client-857d976497-npszm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.OGJBrKbY7x/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql /tmp/tmp.OGJBrKbY7x/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.oWN0uwAQlx ++ mktemp + local LAST_ERR=/tmp/tmp.SMxVyulMzD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.oWN0uwAQlx perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.SMxVyulMzD + rm /tmp/tmp.oWN0uwAQlx /tmp/tmp.SMxVyulMzD + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iNSDHaBrrX +++ mktemp ++ local LAST_ERR=/tmp/tmp.xJaoZJ43Tq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iNSDHaBrrX ++ cat /tmp/tmp.xJaoZJ43Tq ++ rm /tmp/tmp.iNSDHaBrrX /tmp/tmp.xJaoZJ43Tq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.whEu5UBon1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.04X6NUjtLr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.whEu5UBon1 ++ cat /tmp/tmp.04X6NUjtLr ++ rm /tmp/tmp.whEu5UBon1 /tmp/tmp.04X6NUjtLr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ag0vxj6EMT +++ mktemp ++ local LAST_ERR=/tmp/tmp.F4WtARVBLs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ag0vxj6EMT ++ cat /tmp/tmp.F4WtARVBLs ++ rm /tmp/tmp.Ag0vxj6EMT /tmp/tmp.F4WtARVBLs ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
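[editor's note] The dots printed above and below come from the suite's wait_cluster_consistency polling. Reconstructed from the expanded commands in this trace, the loop reduces to the following minimal sketch; names mirror the trace, and the kubectl_bin retry/tempfile wrapper is elided:

# Minimal sketch of the readiness poll seen in this trace: check
# .status.state every 5 seconds, print a dot per attempt, give up
# after 300 iterations. Simplified; not the suite's exact helper.
wait_cluster_ready() {
    local cluster_name=$1
    local i=0
    local max=300
    echo -n "waiting for pxc/${cluster_name} to be ready"
    until [[ $(kubectl get pxc "${cluster_name}" -o 'jsonpath={.status.state}') == "ready" ]]; do
        echo -n .
        sleep 5
        [[ ${i} -ge ${max} ]] && { echo " timeout"; return 1; }
        let i+=1
    done
    echo
}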
[wait loop iterations 2-13 elided: each repeated "sleep 5; kubectl get pxc some-name -o 'jsonpath={.status.state}'" and matched "initializing"]
.+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TebV4FQMjk +++ mktemp ++ local LAST_ERR=/tmp/tmp.TqavXjy76b ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TebV4FQMjk ++ cat /tmp/tmp.TqavXjy76b ++ rm /tmp/tmp.TebV4FQMjk /tmp/tmp.TqavXjy76b ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tFOIuDESFl +++ mktemp ++ local LAST_ERR=/tmp/tmp.KrR6nrFnNa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tFOIuDESFl ++ cat /tmp/tmp.KrR6nrFnNa ++ rm /tmp/tmp.tFOIuDESFl /tmp/tmp.KrR6nrFnNa ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OiyHO63RZ4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.xjEDfB3Cd2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OiyHO63RZ4 ++ cat /tmp/tmp.xjEDfB3Cd2 ++ rm /tmp/tmp.OiyHO63RZ4 /tmp/tmp.xjEDfB3Cd2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2xkQbw0ZNu +++ mktemp ++ local LAST_ERR=/tmp/tmp.QkuC5XTyjB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2xkQbw0ZNu ++ cat /tmp/tmp.QkuC5XTyjB ++ rm /tmp/tmp.2xkQbw0ZNu /tmp/tmp.QkuC5XTyjB ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CqEEbjJFw5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.fF8g0IJInY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CqEEbjJFw5 ++ cat /tmp/tmp.fF8g0IJInY ++ rm /tmp/tmp.CqEEbjJFw5 /tmp/tmp.fF8g0IJInY ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.LPhjpALimg ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.RI46D9BtUv +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.LPhjpALimg +++++ cat /tmp/tmp.RI46D9BtUv +++++ rm /tmp/tmp.LPhjpALimg /tmp/tmp.RI46D9BtUv +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.m0jRvb6ueD ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Q5TaR50N4r +++++ local exit_status=0 ++++++ seq 0 2 
+++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.m0jRvb6ueD +++++ cat /tmp/tmp.Q5TaR50N4r +++++ rm /tmp/tmp.m0jRvb6ueD /tmp/tmp.Q5TaR50N4r +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vAha2AQ8or +++ mktemp ++ local LAST_ERR=/tmp/tmp.VdCkPXMwZ6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vAha2AQ8or ++ cat /tmp/tmp.VdCkPXMwZ6 ++ rm /tmp/tmp.vAha2AQ8or /tmp/tmp.VdCkPXMwZ6 ++ return 0 + [[ 2 == \2 ]] + echo + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.RpO9ldKghv ++ mktemp + local LAST_ERR=/tmp/tmp.BTac3CPT8Y + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RpO9ldKghv secret/my-cluster-secrets-2 patched + cat /tmp/tmp.BTac3CPT8Y + rm /tmp/tmp.RpO9ldKghv /tmp/tmp.BTac3CPT8Y + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.emB1BlNRKD +++ mktemp ++ local LAST_ERR=/tmp/tmp.mJZMQsHzhq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.emB1BlNRKD ++ cat /tmp/tmp.mJZMQsHzhq ++ rm /tmp/tmp.emB1BlNRKD /tmp/tmp.mJZMQsHzhq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NrEd7LGqp3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.36aAp5uphz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NrEd7LGqp3 ++ cat /tmp/tmp.36aAp5uphz ++ rm /tmp/tmp.NrEd7LGqp3 /tmp/tmp.36aAp5uphz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
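[editor's note] The patch_secret step traced above is a one-key merge patch with a base64-encoded value. A sketch reconstructed from the expanded commands in the trace (names mirror the trace; the retry wrapper is elided):

# Sketch of the patch_secret helper as traced above: base64-encode the
# new password and patch a single data key of the Secret in place.
patch_secret() {
    local secret=$1
    local key=$2
    local value=$3
    kubectl patch secret "${secret}" "-p={\"data\":{\"${key}\": \"${value}\"}}"
}
newpass=test-password2
patch_secret my-cluster-secrets-2 operator "$(echo -n "${newpass}" | base64)"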
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eCIpmi4LhO +++ mktemp ++ local LAST_ERR=/tmp/tmp.9Opy9wnxNL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eCIpmi4LhO ++ cat /tmp/tmp.9Opy9wnxNL ++ rm /tmp/tmp.eCIpmi4LhO /tmp/tmp.9Opy9wnxNL ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CJ0Lz3TzG7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.rabdRAPTtq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CJ0Lz3TzG7 ++ cat /tmp/tmp.rabdRAPTtq ++ rm /tmp/tmp.CJ0Lz3TzG7 /tmp/tmp.rabdRAPTtq ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.YoJkky2X4G ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.mxR8WyNo0C +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.YoJkky2X4G +++++ cat /tmp/tmp.mxR8WyNo0C +++++ rm /tmp/tmp.YoJkky2X4G /tmp/tmp.mxR8WyNo0C +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.t7F59bmz00 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.kd0VYpjNdM +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.t7F59bmz00 +++++ cat /tmp/tmp.kd0VYpjNdM +++++ rm /tmp/tmp.t7F59bmz00 /tmp/tmp.kd0VYpjNdM +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9OKJY9f2q1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.At0jE8mSr2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9OKJY9f2q1 ++ cat /tmp/tmp.At0jE8mSr2 ++ rm /tmp/tmp.9OKJY9f2q1 /tmp/tmp.At0jE8mSr2 ++ return 0 + [[ 2 == \2 ]] + echo + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW 
TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JXJEIXh7Wn +++ mktemp ++ local LAST_ERR=/tmp/tmp.d0ZFYq1RB6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JXJEIXh7Wn ++ cat /tmp/tmp.d0ZFYq1RB6 ++ rm /tmp/tmp.JXJEIXh7Wn /tmp/tmp.d0ZFYq1RB6 ++ return 0 + client_pod=pxc-client-857d976497-npszm + wait_pod pxc-client-857d976497-npszm + local pod=pxc-client-857d976497-npszm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-npszm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-npszm condition met waiting for pod/pxc-client-857d976497-npszm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.OGJBrKbY7x/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql /tmp/tmp.OGJBrKbY7x/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.FVGvHO6dpI +++ mktemp ++ local LAST_ERR=/tmp/tmp.KEHw7V4YVN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FVGvHO6dpI ++ cat /tmp/tmp.KEHw7V4YVN ++ rm /tmp/tmp.FVGvHO6dpI /tmp/tmp.KEHw7V4YVN ++ return 0 + newpass='uWg[fu3Q5.JiGhOJ' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''uWg[fu3Q5.JiGhOJ'\'';' '-h some-name-pxc -uroot -p'\''uWg[fu3Q5.JiGhOJ'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''uWg[fu3Q5.JiGhOJ'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''uWg[fu3Q5.JiGhOJ'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XpQfTXVXqG +++ mktemp ++ local LAST_ERR=/tmp/tmp.Nb9TlmXYY6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XpQfTXVXqG ++ cat /tmp/tmp.Nb9TlmXYY6 ++ rm /tmp/tmp.XpQfTXVXqG /tmp/tmp.Nb9TlmXYY6 ++ return 0 + client_pod=pxc-client-857d976497-npszm + wait_pod pxc-client-857d976497-npszm + local pod=pxc-client-857d976497-npszm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-npszm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-npszm condition met waiting for pod/pxc-client-857d976497-npszm to become ReadyDefaulted container "pxc-client" out of: 
pxc-client, backup .Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''uWg[fu3Q5.JiGhOJ'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''uWg[fu3Q5.JiGhOJ'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''uWg[fu3Q5.JiGhOJ'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''uWg[fu3Q5.JiGhOJ'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.p1mN0voVdS +++ mktemp ++ local LAST_ERR=/tmp/tmp.QuSJzMSbHT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.p1mN0voVdS ++ cat /tmp/tmp.QuSJzMSbHT ++ rm /tmp/tmp.p1mN0voVdS /tmp/tmp.QuSJzMSbHT ++ return 0 + client_pod=pxc-client-857d976497-npszm + wait_pod pxc-client-857d976497-npszm + local pod=pxc-client-857d976497-npszm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-npszm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-npszm condition met waiting for pod/pxc-client-857d976497-npszm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.OGJBrKbY7x/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql /tmp/tmp.OGJBrKbY7x/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.YEk0oTVV7H +++ mktemp ++ local LAST_ERR=/tmp/tmp.QccS1hRAhy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YEk0oTVV7H ++ cat /tmp/tmp.QccS1hRAhy ++ rm /tmp/tmp.YEk0oTVV7H /tmp/tmp.QccS1hRAhy ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.tAfQvwptry ++ mktemp + local LAST_ERR=/tmp/tmp.F00h2AHaXj + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.tAfQvwptry secret/my-cluster-secrets-2 configured + cat /tmp/tmp.F00h2AHaXj Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
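[editor's note] getSecretData, used above to read the generated root password and the internal operator password, is a Go-template read plus base64 decode. A sketch matching the expanded commands in the trace:

# Sketch of the getSecretData helper traced above: print one key of a
# Kubernetes Secret, decoded from base64.
getSecretData() {
    local secretName=$1
    local dataKey=$2
    kubectl get "secrets/${secretName}" "--template={{.data.${dataKey}}}" | base64 --decode
}
# usage as in the trace: pass=$(getSecretData internal-some-name operator)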
+ rm /tmp/tmp.tAfQvwptry /tmp/tmp.F00h2AHaXj + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PB94NVpPaX +++ mktemp ++ local LAST_ERR=/tmp/tmp.SuPFthJoXX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PB94NVpPaX ++ cat /tmp/tmp.SuPFthJoXX ++ rm /tmp/tmp.PB94NVpPaX /tmp/tmp.SuPFthJoXX ++ return 0 + client_pod=pxc-client-857d976497-npszm + wait_pod pxc-client-857d976497-npszm + local pod=pxc-client-857d976497-npszm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-npszm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-npszm condition met waiting for pod/pxc-client-857d976497-npszm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.OGJBrKbY7x/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql /tmp/tmp.OGJBrKbY7x/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-22226~ + local LAST_OUT=/tmp/tmp.sda0ZZtXtS + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2193-1eb37b20#' ++ mktemp + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_ERR=/tmp/tmp.JhDuJxYEGm + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.sda0ZZtXtS perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.JhDuJxYEGm + rm /tmp/tmp.sda0ZZtXtS /tmp/tmp.JhDuJxYEGm + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.T4qultXU3Z +++ mktemp ++ local LAST_ERR=/tmp/tmp.njQ9gTj6Ll ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.T4qultXU3Z ++ cat /tmp/tmp.njQ9gTj6Ll ++ rm /tmp/tmp.T4qultXU3Z /tmp/tmp.njQ9gTj6Ll ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
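[editor's note] The apply_config step above pipes the cluster CR through a chain of sed image-pinning rules before kubectl apply. A trimmed sketch reconstructed from the trace, showing only two of the substitutions and eliding the retry wrapper:

# Trimmed sketch of cat_config/apply_config as traced above: pin the
# apiVersion and rewrite image references, then apply the result.
apply_config() {
    cat "$1" \
        | /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
        | /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' \
        | kubectl apply -f -
}
apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/conf/some-name.yml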
[wait loop iterations 0-46 elided: each repeated "sleep 5; kubectl get pxc some-name -o 'jsonpath={.status.state}'" and matched "initializing"]
.+ sleep 5 + [[ 47 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Z0AJN7X2OL +++ mktemp ++ local LAST_ERR=/tmp/tmp.NebaknWRni ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Z0AJN7X2OL ++ cat /tmp/tmp.NebaknWRni ++ rm /tmp/tmp.Z0AJN7X2OL /tmp/tmp.NebaknWRni ++ return 0 + [[ ready == \r\e\a\d\y ]]
++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VLHerdcvqp +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ehi1dKTw3q ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VLHerdcvqp ++ cat /tmp/tmp.Ehi1dKTw3q ++ rm /tmp/tmp.VLHerdcvqp /tmp/tmp.Ehi1dKTw3q ++ return 0 + [[ 3 == \3 ]]
+++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.7VBEt2Rkmx ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.kJD7SEu67G +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.7VBEt2Rkmx +++++ cat /tmp/tmp.kJD7SEu67G +++++ rm /tmp/tmp.7VBEt2Rkmx /tmp/tmp.kJD7SEu67G +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy
++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jUUA0NwGWs +++ mktemp ++ local LAST_ERR=/tmp/tmp.gos6y9kNJ6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jUUA0NwGWs ++ cat /tmp/tmp.gos6y9kNJ6 ++ rm /tmp/tmp.jUUA0NwGWs /tmp/tmp.gos6y9kNJ6 ++ return 0 + [[ 3 == \3 ]] + echo
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + check_generation 2 haproxy some-name + local generation=2 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IJEnNwldmy +++ mktemp ++ local LAST_ERR=/tmp/tmp.jdMoS86ixK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IJEnNwldmy ++ cat /tmp/tmp.jdMoS86ixK ++ rm /tmp/tmp.IJEnNwldmy /tmp/tmp.jdMoS86ixK ++ return 0 + current_generation=2 + [[ 2 != \2 ]]
+ patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.jBqS4CaHvH ++ mktemp + local LAST_ERR=/tmp/tmp.EYKDo7fnhp + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jBqS4CaHvH secret/my-cluster-secrets patched + cat /tmp/tmp.EYKDo7fnhp + rm /tmp/tmp.jBqS4CaHvH /tmp/tmp.EYKDo7fnhp + return 0
+ sleep 15
+ wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace
-----------------------------------------------------------------------------------
wait cluster consistency
-----------------------------------------------------------------------------------
+ local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lMASdVgnGu +++ mktemp ++ local LAST_ERR=/tmp/tmp.NuNWlrX7i9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lMASdVgnGu ++ cat /tmp/tmp.NuNWlrX7i9 ++ rm /tmp/tmp.lMASdVgnGu /tmp/tmp.NuNWlrX7i9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n .
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YWWmfpyHx8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.p3m3WsrJSH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YWWmfpyHx8 ++ cat /tmp/tmp.p3m3WsrJSH ++ rm /tmp/tmp.YWWmfpyHx8 /tmp/tmp.p3m3WsrJSH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n .
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.271fzklY82 +++ mktemp ++ local LAST_ERR=/tmp/tmp.S6kcKel5Go ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.271fzklY82 ++ cat /tmp/tmp.S6kcKel5Go ++ rm /tmp/tmp.271fzklY82 /tmp/tmp.S6kcKel5Go ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n .
.+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AHn7QaKnTI +++ mktemp ++ local LAST_ERR=/tmp/tmp.iZWg4jZI47 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AHn7QaKnTI ++ cat /tmp/tmp.iZWg4jZI47 ++ rm /tmp/tmp.AHn7QaKnTI /tmp/tmp.iZWg4jZI47 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n .
.+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jNPO9q0mZR +++ mktemp ++ local LAST_ERR=/tmp/tmp.EEce6oZnYT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jNPO9q0mZR ++ cat /tmp/tmp.EEce6oZnYT ++ rm /tmp/tmp.jNPO9q0mZR /tmp/tmp.EEce6oZnYT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n .
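The patch_secret call above is the whole trigger for this round of the test: it rewrites one key of the cluster secret with a base64-encoded password (dGVzdC1wYXNzd29yZDI= is simply test-password2). An equivalent stand-alone form (a sketch, not the helper itself):

    secret=my-cluster-secrets
    key=monitor
    value=$(echo -n 'test-password2' | base64)   # dGVzdC1wYXNzd29yZDI=
    kubectl patch secret "$secret" -p="{\"data\":{\"$key\": \"$value\"}}"

The operator watches this secret, which is why the patch alone is enough to produce the "Password changed, updating user" and "Proxy pods will be restarted" entries seen later in the operator log.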
.+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tCiZBvLIlp +++ mktemp ++ local LAST_ERR=/tmp/tmp.CP8yDqh8k0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tCiZBvLIlp ++ cat /tmp/tmp.CP8yDqh8k0 ++ rm /tmp/tmp.tCiZBvLIlp /tmp/tmp.CP8yDqh8k0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n .
.+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZEzIkMayeO +++ mktemp ++ local LAST_ERR=/tmp/tmp.nuxab7Frpj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZEzIkMayeO ++ cat /tmp/tmp.nuxab7Frpj ++ rm /tmp/tmp.ZEzIkMayeO /tmp/tmp.nuxab7Frpj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n .
.+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.h9q11WBfeD +++ mktemp ++ local LAST_ERR=/tmp/tmp.nOTq6FyzKd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.h9q11WBfeD ++ cat /tmp/tmp.nOTq6FyzKd ++ rm /tmp/tmp.h9q11WBfeD /tmp/tmp.nOTq6FyzKd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n .
.+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.08IQm39cMY +++ mktemp ++ local LAST_ERR=/tmp/tmp.Yb17x38FQ5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.08IQm39cMY ++ cat /tmp/tmp.Yb17x38FQ5 ++ rm /tmp/tmp.08IQm39cMY /tmp/tmp.Yb17x38FQ5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n .
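Reaching ready is only the first exit condition; as the next lines show, the helper also verifies that both statefulsets report the full replica count (cluster_size=3 and proxy_size=3 in this run) before the test moves on. The combined check amounts to (a sketch, assuming the same jsonpath fields traced below):

    state=$(kubectl get pxc some-name -o 'jsonpath={.status.state}')
    pxc_ready=$(kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}')
    proxy_ready=$(kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}')
    [[ $state == "ready" && $pxc_ready == 3 && $proxy_ready == 3 ]]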
.+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Xzb9W9VGTl +++ mktemp ++ local LAST_ERR=/tmp/tmp.mICZ7JbVlm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Xzb9W9VGTl ++ cat /tmp/tmp.mICZ7JbVlm ++ rm /tmp/tmp.Xzb9W9VGTl /tmp/tmp.mICZ7JbVlm ++ return 0 + [[ ready == \r\e\a\d\y ]]
++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tnMe9PumH7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.S04TJwXjgz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tnMe9PumH7 ++ cat /tmp/tmp.S04TJwXjgz ++ rm /tmp/tmp.tnMe9PumH7 /tmp/tmp.S04TJwXjgz ++ return 0 + [[ 3 == \3 ]]
+++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.vSN64Dx1vy ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.64DRqJ0VdK +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.vSN64Dx1vy +++++ cat /tmp/tmp.64DRqJ0VdK +++++ rm /tmp/tmp.vSN64Dx1vy /tmp/tmp.64DRqJ0VdK +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy
++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IFzxTvJJlI +++ mktemp ++ local LAST_ERR=/tmp/tmp.PyCQWg8tRI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IFzxTvJJlI ++ cat /tmp/tmp.PyCQWg8tRI ++ rm /tmp/tmp.IFzxTvJJlI /tmp/tmp.PyCQWg8tRI ++ return 0 + [[ 3 == \3 ]] + echo
+ compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-3-57.sql ]]
+ run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SrbFWTFMgv +++ mktemp ++ local LAST_ERR=/tmp/tmp.MMfPoM4mhX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SrbFWTFMgv ++ cat /tmp/tmp.MMfPoM4mhX ++ rm /tmp/tmp.SrbFWTFMgv /tmp/tmp.MMfPoM4mhX ++ return 0
+ client_pod=pxc-client-857d976497-npszm + wait_pod pxc-client-857d976497-npszm + local pod=pxc-client-857d976497-npszm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-npszm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace
pod/pxc-client-857d976497-npszm condition met
waiting for pod/pxc-client-857d976497-npszm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace + '[' '!' -s /tmp/tmp.OGJBrKbY7x/select-3.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-3.sql /tmp/tmp.OGJBrKbY7x/select-3.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + check_generation 3 haproxy some-name + local generation=3 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LBjxyAGFqq +++ mktemp ++ local LAST_ERR=/tmp/tmp.j4W9WrqH3p ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LBjxyAGFqq ++ cat /tmp/tmp.j4W9WrqH3p ++ rm /tmp/tmp.LBjxyAGFqq /tmp/tmp.j4W9WrqH3p ++ return 0 + current_generation=3 + [[ 3 != \3 ]]
+ destroy users-22226 + local namespace=users-22226 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace
-----------------------------------------------------------------------------------
destroy cluster/operator and all other resources
-----------------------------------------------------------------------------------
+ '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' + grep -v 'get backup status: Job.batch' + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + sort -u ++ get_operator_pod + tee /tmp/tmp.OGJBrKbY7x/operator.log ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.pwLDDZoCpS +++ mktemp ++ local LAST_ERR=/tmp/tmp.XmumDLFhb4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pwLDDZoCpS ++ cat /tmp/tmp.XmumDLFhb4 ++ rm /tmp/tmp.pwLDDZoCpS /tmp/tmp.XmumDLFhb4 ++ return 0
+ kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-75d958d548-5877h ++ mktemp + local LAST_OUT=/tmp/tmp.GuMnTFRQmC ++ mktemp + local LAST_ERR=/tmp/tmp.VtaLdy7bnf + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-75d958d548-5877h + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.GuMnTFRQmC + cat /tmp/tmp.VtaLdy7bnf + rm /tmp/tmp.GuMnTFRQmC /tmp/tmp.VtaLdy7bnf + return 0
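compare_mysql_cmd reduces to: find the pxc-client pod, run the statement through it with the rotated credentials, and diff the captured output against a canned fixture (a 5.7-specific select-3-57.sql would be preferred if it existed; here the default select-3.sql is used). A sketch of that flow; the exact mysql flags used inside the client pod are an assumption:

    expected=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-3.sql
    client_pod=$(kubectl get pods --selector=name=pxc-client \
        -o 'jsonpath={.items[].metadata.name}')
    kubectl exec "$client_pod" -- mysql -sN \
        -h some-name-haproxy -umonitor -p'test-password2' \
        -e 'SHOW DATABASES;' >/tmp/select-3.sql
    diff -u "$expected" /tmp/select-3.sql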
2025-11-27T11:54:39.605Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.13-gke.1454000"}
2025-11-27T11:54:39.606Z INFO setup Manager starting up {"gitCommit": "1eb37b20ea39a043846a217c61acb7bcd9d0d5c9", "gitBranch": "PR-2193-1eb37b20", "buildTime": "2025-11-27T09:39:01Z", "goVersion": "go1.25.4", "os": "linux", "arch": "amd64"}
2025-11-27T11:54:39.609Z INFO setup Registering Components.
2025-11-27T11:54:40.282Z INFO controller-runtime.metrics Starting metrics server
2025-11-27T11:54:40.282Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"}
2025-11-27T11:54:40.282Z INFO setup Starting the Cmd.
2025-11-27T11:54:40.283Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"}
2025-11-27T11:54:40.283Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"}
2025-11-27T11:54:40.283Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false}
2025-11-27T11:54:40.283Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443}
2025-11-27T11:54:40.283Z INFO controller-runtime.webhook Starting webhook server
2025-11-27T11:54:40.283Z INFO starting server {"name": "health probe", "addr": "[::]:8081"}
2025-11-27T11:54:40.383Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com...
2025-11-27T11:54:40.413Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com
2025-11-27T11:54:40.414Z DEBUG events percona-xtradb-cluster-operator-75d958d548-5877h_6b1a2289-313c-429d-9e53-892eb518e2d6 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"01c7e483-cbe6-47cf-a0f3-541a69af0b1d","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1764244480402927009"}, "reason": "LeaderElection"}
2025-11-27T11:54:40.414Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"}
2025-11-27T11:54:40.414Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.Secret"}
2025-11-27T11:54:40.414Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"}
2025-11-27T11:54:40.415Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"}
2025-11-27T11:54:40.515Z INFO Starting Controller {"controller": "pxc-controller"}
2025-11-27T11:54:40.515Z INFO Starting Controller {"controller": "pxcrestore-controller"}
2025-11-27T11:54:40.515Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1}
2025-11-27T11:54:40.515Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1}
2025-11-27T11:54:40.516Z INFO Starting Controller {"controller": "pxcbackup-controller"}
2025-11-27T11:54:40.516Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1}
2025-11-27T11:55:15.723Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "69549c2a-a6cd-4ae9-8c66-b2ec9e49d070", "version": "1.19.0"}
2025-11-27T11:55:15.841Z INFO User secrets updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "69549c2a-a6cd-4ae9-8c66-b2ec9e49d070", "secrets": "my-cluster-secrets"}
2025-11-27T11:55:16.057Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "69549c2a-a6cd-4ae9-8c66-b2ec9e49d070", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"}
2025-11-27T11:55:16.175Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "69549c2a-a6cd-4ae9-8c66-b2ec9e49d070", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"}
2025-11-27T11:55:16.210Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "69549c2a-a6cd-4ae9-8c66-b2ec9e49d070", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"}
2025-11-27T11:55:16.254Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "69549c2a-a6cd-4ae9-8c66-b2ec9e49d070", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
2025-11-27T11:55:16.315Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "69549c2a-a6cd-4ae9-8c66-b2ec9e49d070", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
2025-11-27T11:55:16.439Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "69549c2a-a6cd-4ae9-8c66-b2ec9e49d070", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
2025-11-27T11:55:16.594Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "69549c2a-a6cd-4ae9-8c66-b2ec9e49d070", "object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
2025-11-27T11:55:17.079Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "32e8a9a8-d809-447e-998c-cf279f0784ab", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"}
2025-11-27T11:55:17.099Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "32e8a9a8-d809-447e-998c-cf279f0784ab", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"}
2025-11-27T11:56:33.822Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "b5c644cd-8371-4994-89aa-37b6bd5d0898", "user": "operator"}
2025-11-27T11:56:33.849Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "b5c644cd-8371-4994-89aa-37b6bd5d0898", "user": "monitor"}
2025-11-27T11:56:33.878Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "b5c644cd-8371-4994-89aa-37b6bd5d0898"}
2025-11-27T11:56:33.910Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "b5c644cd-8371-4994-89aa-37b6bd5d0898", "user": "xtrabackup"}
2025-11-27T11:56:33.936Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "b5c644cd-8371-4994-89aa-37b6bd5d0898"}
2025-11-27T11:56:33.948Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "b5c644cd-8371-4994-89aa-37b6bd5d0898", "err": "get primary pxc pod: not found"}
2025-11-27T11:56:38.792Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "9bd41856-cfe6-4e0a-8f0c-1cbfd07e588f", "err": "get primary pxc pod: not found"}
2025-11-27T11:56:43.946Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "2116e651-0b58-46c7-b46d-e6787ccf13eb", "err": "get primary pxc pod: not found"}
2025-11-27T11:56:49.082Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "8bdb6ca3-681e-46df-ad89-bf19c31afde4", "err": "get primary pxc pod: not found"}
2025-11-27T11:59:00.246Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "5d653064-3aed-4c99-b6ee-172791fae7d6", "user": "root"}
2025-11-27T11:59:00.282Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "5d653064-3aed-4c99-b6ee-172791fae7d6", "user": "replication"}
2025-11-27T11:59:00.334Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "5d653064-3aed-4c99-b6ee-172791fae7d6", "new version": "5.7.44-48-57"}
2025-11-27T11:59:01.966Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "5d653064-3aed-4c99-b6ee-172791fae7d6"}
2025-11-27T11:59:06.986Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "86e02c5b-1688-4a04-b92e-a74124f8bfe2"}
2025-11-27T11:59:12.178Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "1fceb53c-5da0-4a3a-86eb-6b3860144b85"}
2025-11-27T11:59:17.382Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "1de9b6b7-cf84-4768-a00d-c226b7bb9176"}
2025-11-27T11:59:22.687Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "2715c162-c7aa-420f-8795-7cf116d23ae3"}
2025-11-27T11:59:27.857Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "7fd8b41e-3c7c-4894-93bc-6a1a79069afc"}
2025-11-27T11:59:33.081Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "7964f599-a1ca-4869-b603-321f4284ab68"}
2025-11-27T11:59:38.599Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "702e4b73-8685-4424-a746-40de3c49446e"}
2025-11-27T11:59:43.472Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "a646f5c0-3c93-45e1-a335-401c86da9a13"}
2025-11-27T11:59:48.683Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "f767f19e-2f86-4009-9f9b-104b131d4581"}
2025-11-27T11:59:54.095Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "32eb8d9e-973c-4076-91c3-d9727a51542d"}
2025-11-27T11:59:58.978Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "6acd97b9-882f-4f79-8c49-d9ede283699f"}
2025-11-27T12:00:04.470Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "13c945cd-e310-4dd8-b417-b22cd37b2582"}
2025-11-27T12:00:10.082Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "43864267-08f6-4e14-adb9-df83225d70c6"}
2025-11-27T12:00:15.106Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "09b4cd80-1e2c-4aab-8e89-b006e733c91e"}
2025-11-27T12:00:20.356Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "f8ca873a-aba1-4dfb-a52a-b5185ad77338"}
2025-11-27T12:00:25.396Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "57f37e8b-e805-4bec-9890-4e2dcbede5c3"}
2025-11-27T12:00:30.883Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "999b8f71-7946-4546-96cb-c9a1fc0d5238"}
2025-11-27T12:00:35.896Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "6b34567f-86cc-4cff-a2c0-cb3ab9a9323f"}
2025-11-27T12:00:41.289Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "2731643c-c3aa-4064-b62b-86315474b782"}
2025-11-27T12:00:42.493Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "63497a71-9f35-4387-9c90-d1a3016f5c95", "user": "root"}
2025-11-27T12:00:42.509Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "63497a71-9f35-4387-9c90-d1a3016f5c95", "user": "root"}
2025-11-27T12:00:42.528Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "63497a71-9f35-4387-9c90-d1a3016f5c95", "secret": "some-name-mysql-init", "user": "root"}
2025-11-27T12:00:45.025Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "63497a71-9f35-4387-9c90-d1a3016f5c95"}
2025-11-27T12:00:45.045Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "63497a71-9f35-4387-9c90-d1a3016f5c95", "user": "root"}
2025-11-27T12:00:46.608Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "63497a71-9f35-4387-9c90-d1a3016f5c95"}
2025-11-27T12:00:51.987Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "89ab11f4-802e-40e7-a1d2-e56121f40ee2"}
2025-11-27T12:00:57.071Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "19c6b7e9-3f55-4bef-9fd6-d2326f09a60e"}
2025-11-27T12:01:02.489Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "5d9b4a5b-6ec1-4f2b-bb44-6cd3129d0235"}
2025-11-27T12:01:03.089Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "312315ad-a758-41e2-8424-aea90684df7d", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-27T12:01:03.158Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "312315ad-a758-41e2-8424-aea90684df7d", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-27T12:01:06.063Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "312315ad-a758-41e2-8424-aea90684df7d", "error": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-27T12:01:31.742Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "021395fb-cc69-4311-ab5a-a552096df0bf", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-27T12:01:34.260Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "d71b6001-1fee-4985-9fbc-2dece7f3ffc8", "user": "proxyadmin"}
2025-11-27T12:01:34.260Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "d71b6001-1fee-4985-9fbc-2dece7f3ffc8", "user": "proxyadmin"}
2025-11-27T12:01:34.288Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "d71b6001-1fee-4985-9fbc-2dece7f3ffc8", "user": "proxyadmin"}
2025-11-27T12:01:34.289Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "572293cd-3f9d-4919-a643-73399ffc20fc", "error": "syncusers: ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:797) : Failed to load user list from ProxySQL database. \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "syncusers: ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:797) : Failed to load user list from ProxySQL database. \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:979\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-27T12:01:34.308Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "d71b6001-1fee-4985-9fbc-2dece7f3ffc8", "user": "proxyadmin"}
2025-11-27T12:01:34.308Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "d71b6001-1fee-4985-9fbc-2dece7f3ffc8", "last-applied-secret": "100ff677e872fdd492f227bff6367143c551f9bc425e06db054f0a1598af9c1b"}
2025-11-27T12:01:34.312Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "d71b6001-1fee-4985-9fbc-2dece7f3ffc8", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-27T12:01:34.357Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "d71b6001-1fee-4985-9fbc-2dece7f3ffc8", "err": "get primary pxc pod: not found"}
2025-11-27T12:01:36.119Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "d71b6001-1fee-4985-9fbc-2dece7f3ffc8", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-27T12:01:39.875Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "9b331718-25f5-4c8f-b3c4-c79f04513849", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-27T12:01:57.891Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "6a192a56-c88b-4f74-ae73-acdcf1fcf00a", "err": "get primary pxc pod: not found"}
2025-11-27T12:02:03.938Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "5da71400-2334-4374-9584-425f5e53d531", "error": "syncusers: ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "syncusers: ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:979\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-27T12:02:08.831Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "0f099783-33e4-45ac-b10b-1d6480ff2b8a", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-27T12:02:08.874Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "0f099783-33e4-45ac-b10b-1d6480ff2b8a", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-27T12:02:10.596Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "0f099783-33e4-45ac-b10b-1d6480ff2b8a"}
2025-11-27T12:02:18.846Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "fe6defde-b454-42ac-aaf4-352122da0149", "user": "xtrabackup"}
2025-11-27T12:02:18.860Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "fe6defde-b454-42ac-aaf4-352122da0149", "user": "xtrabackup"}
2025-11-27T12:02:18.878Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "fe6defde-b454-42ac-aaf4-352122da0149", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2025-11-27T12:02:18.897Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "fe6defde-b454-42ac-aaf4-352122da0149", "user": "xtrabackup"}
2025-11-27T12:02:18.897Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "fe6defde-b454-42ac-aaf4-352122da0149", "last-applied-secret": "eba17d0621879453ca8c7af89de701155d52156792a65575b2eb42953bdc3c1f"}
2025-11-27T12:02:18.900Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "fe6defde-b454-42ac-aaf4-352122da0149", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-27T12:02:21.503Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "fe6defde-b454-42ac-aaf4-352122da0149"}
2025-11-27T12:03:57.085Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "622f9573-64c4-422c-ba5a-26f2e3faee85", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-22226 on 34.118.224.10:53: no such host"}
2025-11-27T12:04:02.367Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "c1be49ec-35bc-4ab3-9d8d-67b7ddf59334", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-22226 on 34.118.224.10:53: no such host"}
2025-11-27T12:04:08.104Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "d1b2d087-47d9-4a9d-b375-bd891231615b", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-22226 on 34.118.224.10:53: no such host"}
2025-11-27T12:04:13.435Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "84f1beb0-44f7-402d-abd2-a87936735a35", "primary name": "some-name-pxc-0.some-name-pxc.users-22226.svc.cluster.local"}
2025-11-27T12:04:18.570Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "4ca7c83c-8f58-436f-b16f-75c8719a64cc", "primary name": "some-name-pxc-0.some-name-pxc.users-22226.svc.cluster.local"}
2025-11-27T12:04:23.707Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "856b6062-45df-48c8-ac70-418e419fe9ca", "primary name": "some-name-pxc-0.some-name-pxc.users-22226.svc.cluster.local"}
2025-11-27T12:04:28.853Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "0731cfaf-12df-4913-9190-1522719c4a25", "primary name": "some-name-pxc-0.some-name-pxc.users-22226.svc.cluster.local"}
2025-11-27T12:04:33.985Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "dd54d016-3ca6-4309-875b-3a4fe202638d", "primary name": "some-name-pxc-0.some-name-pxc.users-22226.svc.cluster.local"}
2025-11-27T12:04:39.115Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "e6bdc87f-3cc9-4d8c-884d-60b9b6746494", "primary name": "some-name-pxc-0.some-name-pxc.users-22226.svc.cluster.local"}
2025-11-27T12:04:46.772Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "9554e118-6d78-4f1b-94f1-fa188b51e4d7"}
2025-11-27T12:04:51.616Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "6bc30c00-98ca-4717-b7d0-b6b7e6917f18"}
2025-11-27T12:04:54.465Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "8596f62b-5945-41b7-ac6a-2163b3ca1ba3", "user": "monitor"}
2025-11-27T12:04:54.478Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "8596f62b-5945-41b7-ac6a-2163b3ca1ba3", "user": "monitor"}
2025-11-27T12:04:54.503Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "8596f62b-5945-41b7-ac6a-2163b3ca1ba3", "secret": "some-name-mysql-init", "user": "monitor"}
2025-11-27T12:04:54.522Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "8596f62b-5945-41b7-ac6a-2163b3ca1ba3", "user": "monitor"}
2025-11-27T12:04:54.573Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "8596f62b-5945-41b7-ac6a-2163b3ca1ba3", "user": "monitor"}
2025-11-27T12:04:54.573Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "8596f62b-5945-41b7-ac6a-2163b3ca1ba3", "last-applied-secret": "34d6b431bcf55c9904d9e66abfff686bc62f03fd7a7f1e9a36179f313adbac34"}
2025-11-27T12:04:54.577Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "8596f62b-5945-41b7-ac6a-2163b3ca1ba3", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-27T12:04:57.463Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "8596f62b-5945-41b7-ac6a-2163b3ca1ba3", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-27T12:05:38.766Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "38cb544c-ed20-47ee-b924-3fe67a9e24c9"}
2025-11-27T12:05:43.744Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "fb9ea821-2e63-466e-834d-d88c51b5330a"}
2025-11-27T12:05:48.750Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "ef1afe76-2b73-45ff-982e-21f210224323"}
2025-11-27T12:05:53.895Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "2cd30c44-fd38-4e2e-8fc6-d34eb1762837", "user": "operator"}
2025-11-27T12:05:53.908Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "2cd30c44-fd38-4e2e-8fc6-d34eb1762837", "user": "operator"}
2025-11-27T12:05:53.932Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "2cd30c44-fd38-4e2e-8fc6-d34eb1762837", "secret": "some-name-mysql-init", "user": "operator"}
2025-11-27T12:05:53.960Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "2cd30c44-fd38-4e2e-8fc6-d34eb1762837", "user": "operator"}
2025-11-27T12:05:53.960Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "2cd30c44-fd38-4e2e-8fc6-d34eb1762837", "last-applied-secret": "1951f7946b7aee80946e4d3c8b8dcf687519b7ffa65d3482328481703bba0c27"}
2025-11-27T12:05:53.969Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "2cd30c44-fd38-4e2e-8fc6-d34eb1762837", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-27T12:05:55.460Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "399203f5-0f2e-49d7-a02d-b8e86f50f46e", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-27T12:06:38.984Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "763727cb-4554-40b8-b411-631f8f464b2e"}
2025-11-27T12:06:42.295Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "a0ac1d67-42ad-436e-881a-b387f46db491"}
2025-11-27T12:06:47.716Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "03565def-7ccb-4b1f-90bf-7a971d48e206"}
2025-11-27T12:06:52.983Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "41a84d68-877e-4d61-844c-8362b02fba74"}
2025-11-27T12:06:58.027Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "37e769e1-0545-4d2f-963d-e268ba8d4c38"}
2025-11-27T12:06:58.433Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "secrets": "my-cluster-secrets-2"}
2025-11-27T12:06:58.433Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "user": "root"}
2025-11-27T12:06:58.449Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "user": "root"}
2025-11-27T12:06:58.469Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "secret": "some-name-mysql-init", "user": "root"}
2025-11-27T12:07:00.922Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0"}
2025-11-27T12:07:00.950Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "user": "root"}
2025-11-27T12:07:00.950Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "user": "operator"}
2025-11-27T12:07:00.962Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "user": "operator"}
2025-11-27T12:07:00.984Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "secret": "some-name-mysql-init", "user": "operator"}
2025-11-27T12:07:01.009Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "user": "operator"}
2025-11-27T12:07:01.009Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "user": "monitor"}
2025-11-27T12:07:01.020Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "user": "monitor"}
2025-11-27T12:07:01.043Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "secret": "some-name-mysql-init", "user": "monitor"}
2025-11-27T12:07:01.059Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "user": "monitor"}
2025-11-27T12:07:01.084Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "user": "monitor"}
2025-11-27T12:07:01.084Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "user": "xtrabackup"}
2025-11-27T12:07:01.095Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "user": "xtrabackup"}
2025-11-27T12:07:01.149Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2025-11-27T12:07:01.173Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "user": "xtrabackup"}
2025-11-27T12:07:01.173Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "user": "replication"}
2025-11-27T12:07:01.183Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "user": "replication"}
2025-11-27T12:07:01.235Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "secret": "some-name-mysql-init", "user": "replication"}
2025-11-27T12:07:01.261Z INFO
Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "user": "replication"} 2025-11-27T12:07:01.261Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "user": "proxyadmin"} 2025-11-27T12:07:01.279Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "user": "proxyadmin"} 2025-11-27T12:07:01.307Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "user": "proxyadmin"} 2025-11-27T12:07:01.307Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "last-applied-secret": "6f6655bf5e921d847b36948a052f971bb8a1a4e32fa2331151f6e4e16158b1dd"} 2025-11-27T12:07:01.307Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "last-applied-secret": "6f6655bf5e921d847b36948a052f971bb8a1a4e32fa2331151f6e4e16158b1dd"} 2025-11-27T12:07:01.312Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-27T12:07:01.376Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-27T12:07:03.381Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "aa51de2a-d17c-49be-b153-45326eb395c0", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-27T12:08:39.545Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "6603b9c0-9061-4556-ac99-12c82d6d1ad8", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-22226 on 34.118.224.10:53: no such host"} 2025-11-27T12:08:44.778Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "899f0881-9bb7-4a2c-bd7d-9f233a26cbc3", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.129.233.77:33062: connect: connection refused"} 2025-11-27T12:08:49.946Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "2c5c167b-5b43-4a5b-8a98-74f77fc5e95d", "primary name": "some-name-pxc-0.some-name-pxc.users-22226.svc.cluster.local"} 2025-11-27T12:08:55.088Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "8bb50efb-55c5-4255-a441-be8d20296d12", "primary name": "some-name-pxc-0.some-name-pxc.users-22226.svc.cluster.local"} 2025-11-27T12:09:00.254Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "442bd9ee-7a5d-4d1f-9691-57fcf75ef297", "primary name": "some-name-pxc-0.some-name-pxc.users-22226.svc.cluster.local"} 2025-11-27T12:09:05.394Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "223f749f-386e-4f64-a48e-f15e39a366e1", "primary name": "some-name-pxc-0.some-name-pxc.users-22226.svc.cluster.local"} 2025-11-27T12:09:10.532Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "20d0090b-b7d0-4a8d-be9b-8bb2a7d67617", "primary name": "some-name-pxc-0.some-name-pxc.users-22226.svc.cluster.local"} 2025-11-27T12:09:15.675Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "0e3de668-f3c1-465c-a969-facd940065c7", "primary name": "some-name-pxc-0.some-name-pxc.users-22226.svc.cluster.local"} 2025-11-27T12:09:23.807Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "7be342c5-19f6-45ae-9b5d-90b460fe1145"} 2025-11-27T12:09:28.135Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "e2a3a70b-e494-4691-bdc4-249abcb658c6"} 2025-11-27T12:09:30.477Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "645e1cc1-305e-4ae9-837d-31fa8ffbbd2e", "user": "operator"} 2025-11-27T12:09:30.490Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "645e1cc1-305e-4ae9-837d-31fa8ffbbd2e", "user": "operator"} 2025-11-27T12:09:30.507Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "645e1cc1-305e-4ae9-837d-31fa8ffbbd2e", "secret": "some-name-mysql-init", "user": "operator"} 2025-11-27T12:09:30.523Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "645e1cc1-305e-4ae9-837d-31fa8ffbbd2e", "user": "operator"} 2025-11-27T12:09:30.523Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "645e1cc1-305e-4ae9-837d-31fa8ffbbd2e", "last-applied-secret": "9292399bccc56911efdba71a9ba1ed6d029a4df9ef1aa4009b1e6138a2b7aec7"} 2025-11-27T12:09:30.527Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "645e1cc1-305e-4ae9-837d-31fa8ffbbd2e", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-27T12:09:34.160Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", 
"reconcileID": "645e1cc1-305e-4ae9-837d-31fa8ffbbd2e", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' 
(using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22226.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-27T12:10:04.207Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "9c339d51-01d8-4abd-8d69-9b77e0574ea0"} 2025-11-27T12:10:08.829Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "b036aba3-ae45-475b-b02a-49485c4e01f1"} 2025-11-27T12:10:13.783Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "7f41f6da-5412-4872-9d5d-71d84f86468a"} 2025-11-27T12:10:19.086Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "07608313-2c79-4e8e-bd3f-84c51cf1f53d"} 2025-11-27T12:10:24.776Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "30cd69e9-f9fe-4bdb-b0bf-49e6ee50b8f6"} 2025-11-27T12:10:30.117Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "9c13b326-ea45-4957-8457-3ab690a65df8"} 2025-11-27T12:10:35.209Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": 
"some-name", "reconcileID": "de6a5a89-9e4e-4a8e-a11b-9c92895232ab"} 2025-11-27T12:10:41.048Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "e0bea7b2-20fd-4612-8222-f42a640aa06c"} 2025-11-27T12:10:45.796Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "f63e321b-76e3-4937-ab7b-56d9441b4fb9"} 2025-11-27T12:10:50.779Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "47d46184-415f-4528-83f8-d72e4a5ab4b9"} 2025-11-27T12:10:56.196Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "195ea2b6-3739-4ec0-b8d8-3067d74929b4"} 2025-11-27T12:11:01.315Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "eba35d22-b4bb-4fd6-949e-bc8f125e94b3"} 2025-11-27T12:11:06.597Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "9c32d9cc-440a-45ef-9313-e520d460be05"} 2025-11-27T12:11:11.809Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "d64a7353-ed9b-4d97-8d9d-efdb577a8a52"} 2025-11-27T12:11:17.101Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "d161fc85-fc22-402e-948a-410af9341e8e"} 2025-11-27T12:11:21.372Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "c92baca1-ec8f-48f8-a4e6-f855db369a01", "user": "root"} 2025-11-27T12:11:21.387Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "c92baca1-ec8f-48f8-a4e6-f855db369a01", "user": "root"} 2025-11-27T12:11:21.403Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "c92baca1-ec8f-48f8-a4e6-f855db369a01", "secret": "some-name-mysql-init", "user": "root"} 2025-11-27T12:11:22.670Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "7baed3bc-3622-488e-a358-9d070bfd7878"} 2025-11-27T12:11:23.695Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "c92baca1-ec8f-48f8-a4e6-f855db369a01"} 2025-11-27T12:11:23.716Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "c92baca1-ec8f-48f8-a4e6-f855db369a01", "user": "root"} 2025-11-27T12:11:23.716Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "c92baca1-ec8f-48f8-a4e6-f855db369a01", "user": "monitor"} 2025-11-27T12:11:23.727Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "c92baca1-ec8f-48f8-a4e6-f855db369a01", "user": "monitor"} 2025-11-27T12:11:23.742Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "c92baca1-ec8f-48f8-a4e6-f855db369a01", 
"secret": "some-name-mysql-init", "user": "monitor"} 2025-11-27T12:11:23.759Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "c92baca1-ec8f-48f8-a4e6-f855db369a01", "user": "monitor"} 2025-11-27T12:11:23.777Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "c92baca1-ec8f-48f8-a4e6-f855db369a01", "user": "monitor"} 2025-11-27T12:11:23.777Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "c92baca1-ec8f-48f8-a4e6-f855db369a01", "user": "xtrabackup"} 2025-11-27T12:11:23.788Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "c92baca1-ec8f-48f8-a4e6-f855db369a01", "user": "xtrabackup"} 2025-11-27T12:11:23.804Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "c92baca1-ec8f-48f8-a4e6-f855db369a01", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-11-27T12:11:23.828Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "c92baca1-ec8f-48f8-a4e6-f855db369a01", "user": "xtrabackup"} 2025-11-27T12:11:23.828Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "c92baca1-ec8f-48f8-a4e6-f855db369a01", "user": "proxyadmin"} 2025-11-27T12:11:23.845Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "c92baca1-ec8f-48f8-a4e6-f855db369a01", "user": "proxyadmin"} 2025-11-27T12:11:23.885Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "c92baca1-ec8f-48f8-a4e6-f855db369a01", "user": "proxyadmin"} 2025-11-27T12:11:23.885Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "c92baca1-ec8f-48f8-a4e6-f855db369a01", "last-applied-secret": "15270eb01e0c26bb8b767c3d80cae1be45b3fc4be7332994eda130186239ee44"} 2025-11-27T12:11:23.885Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "c92baca1-ec8f-48f8-a4e6-f855db369a01", "last-applied-secret": "15270eb01e0c26bb8b767c3d80cae1be45b3fc4be7332994eda130186239ee44"} 2025-11-27T12:11:23.888Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "c92baca1-ec8f-48f8-a4e6-f855db369a01", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-27T12:11:23.949Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "c92baca1-ec8f-48f8-a4e6-f855db369a01", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-27T12:11:26.214Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "c92baca1-ec8f-48f8-a4e6-f855db369a01", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 
'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-27T12:11:49.320Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "5e90fdd5-5c7c-4d36-b7ea-fd618d5af8b8", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-27T12:11:49.388Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "5e90fdd5-5c7c-4d36-b7ea-fd618d5af8b8", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-27T12:11:49.456Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "5e90fdd5-5c7c-4d36-b7ea-fd618d5af8b8", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-27T12:11:49.555Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "5e90fdd5-5c7c-4d36-b7ea-fd618d5af8b8", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-27T12:11:49.663Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "5e90fdd5-5c7c-4d36-b7ea-fd618d5af8b8", "object": "some-name-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-27T12:11:50.518Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "94b53a3a-9dd4-4d3e-bc60-423728987106", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-11-27T12:14:30.888Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "73f7d565-f289-4261-ba8e-a4e732fbaa1a", "user": "root"} 2025-11-27T12:14:30.902Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "73f7d565-f289-4261-ba8e-a4e732fbaa1a", "user": "root"} 2025-11-27T12:14:30.920Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "73f7d565-f289-4261-ba8e-a4e732fbaa1a", "secret": "some-name-mysql-init", "user": "root"} 2025-11-27T12:14:30.940Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "73f7d565-f289-4261-ba8e-a4e732fbaa1a", "user": "root"} 2025-11-27T12:14:30.940Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "73f7d565-f289-4261-ba8e-a4e732fbaa1a", "user": "operator"} 2025-11-27T12:14:30.949Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", 
"reconcileID": "73f7d565-f289-4261-ba8e-a4e732fbaa1a", "user": "operator"} 2025-11-27T12:14:30.964Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "73f7d565-f289-4261-ba8e-a4e732fbaa1a", "secret": "some-name-mysql-init", "user": "operator"} 2025-11-27T12:14:30.980Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "73f7d565-f289-4261-ba8e-a4e732fbaa1a", "user": "operator"} 2025-11-27T12:14:30.980Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "73f7d565-f289-4261-ba8e-a4e732fbaa1a", "user": "monitor"} 2025-11-27T12:14:30.991Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "73f7d565-f289-4261-ba8e-a4e732fbaa1a", "user": "monitor"} 2025-11-27T12:14:31.011Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "73f7d565-f289-4261-ba8e-a4e732fbaa1a", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-27T12:14:31.030Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "73f7d565-f289-4261-ba8e-a4e732fbaa1a", "user": "monitor"} 2025-11-27T12:14:31.030Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "73f7d565-f289-4261-ba8e-a4e732fbaa1a", "user": "xtrabackup"} 2025-11-27T12:14:31.039Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "73f7d565-f289-4261-ba8e-a4e732fbaa1a", "user": "xtrabackup"} 2025-11-27T12:14:31.067Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "73f7d565-f289-4261-ba8e-a4e732fbaa1a", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-11-27T12:14:31.087Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "73f7d565-f289-4261-ba8e-a4e732fbaa1a", "user": "xtrabackup"} 2025-11-27T12:14:31.088Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "73f7d565-f289-4261-ba8e-a4e732fbaa1a", "user": "replication"} 2025-11-27T12:14:31.097Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "73f7d565-f289-4261-ba8e-a4e732fbaa1a", "user": "replication"} 2025-11-27T12:14:31.120Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "73f7d565-f289-4261-ba8e-a4e732fbaa1a", "secret": "some-name-mysql-init", "user": "replication"} 2025-11-27T12:14:31.137Z INFO HAProxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "73f7d565-f289-4261-ba8e-a4e732fbaa1a", "last-applied-secret": "1951f7946b7aee80946e4d3c8b8dcf687519b7ffa65d3482328481703bba0c27"} 2025-11-27T12:14:31.137Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "73f7d565-f289-4261-ba8e-a4e732fbaa1a", "user": "replication"} 2025-11-27T12:14:31.137Z INFO PXC pods will be 
restarted {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "73f7d565-f289-4261-ba8e-a4e732fbaa1a", "last-applied-secret": "1951f7946b7aee80946e4d3c8b8dcf687519b7ffa65d3482328481703bba0c27"} 2025-11-27T12:14:31.139Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "73f7d565-f289-4261-ba8e-a4e732fbaa1a", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-27T12:14:31.197Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "73f7d565-f289-4261-ba8e-a4e732fbaa1a", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-27T12:14:32.196Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "25007002-cb7d-4d1a-9152-06c770335889", "err": "failed to connect to pod some-name-pxc-2: dial tcp: lookup some-name-pxc-2.some-name-pxc.users-22226 on 34.118.224.10:53: no such host"} 2025-11-27T12:16:04.608Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "a7dd65e5-e71b-4c2f-996c-a7df3f17af9c", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-22226 on 34.118.224.10:53: no such host"} 2025-11-27T12:16:04.923Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "65ed029f-048c-4af7-8a96-38c59a34b80d", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-22226 on 34.118.224.10:53: no such host"} 2025-11-27T12:16:57.848Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "8af45ad2-5909-4bb1-843a-1cd7dde20015", "user": "monitor"} 2025-11-27T12:16:57.860Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "8af45ad2-5909-4bb1-843a-1cd7dde20015", "user": "monitor"} 2025-11-27T12:16:57.882Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "8af45ad2-5909-4bb1-843a-1cd7dde20015", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-27T12:16:57.900Z INFO HAProxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "8af45ad2-5909-4bb1-843a-1cd7dde20015", "last-applied-secret": "583e17ba3eb7ac604a1f4d5aaf0c751c8b66f7cf1dc17e2e8378897474f35c76"} 2025-11-27T12:16:57.900Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "8af45ad2-5909-4bb1-843a-1cd7dde20015", "user": "monitor"} 2025-11-27T12:16:57.903Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "8af45ad2-5909-4bb1-843a-1cd7dde20015", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-27T12:17:10.960Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": 
"086f9923-51b3-4e06-be13-5a2aa1956fa9", "err": "get primary pxc pod: failed to get proxy connection: invalid connection"} 2025-11-27T12:17:14.339Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "0dca55d8-e946-4873-8490-6486a2e08488", "err": "get primary pxc pod: failed to get proxy connection: invalid connection"} 2025-11-27T12:17:24.538Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "b640c899-eec6-465c-a915-3afcf58400e7", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.235.62:3306: connect: connection refused"} 2025-11-27T12:17:27.788Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22226", "name": "some-name", "reconcileID": "af2c59cd-afca-45f4-91ef-8e4dd8ee7f3f", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.235.62:3306: connect: connection refused"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:856 [mysql] 2025/11/27 12:14:13 packets.go:58 unexpected EOF [mysql] 2025/11/27 12:17:07 packets.go:58 unexpected EOF [mysql] 2025/11/27 12:17:08 packets.go:58 unexpected EOF [mysql] 2025/11/27 12:17:09 packets.go:58 unexpected EOF [mysql] 2025/11/27 12:17:10 packets.go:58 unexpected EOF [mysql] 2025/11/27 12:17:11 packets.go:58 unexpected EOF [mysql] 2025/11/27 12:17:12 packets.go:58 unexpected EOF [mysql] 2025/11/27 12:17:13 packets.go:58 unexpected EOF [mysql] 2025/11/27 12:17:14 packets.go:58 unexpected EOF -  }, -  { -  }, -  { -  }, -  }, +  }, -  "100ff677e872fdd492f227bff6367143c551f9bc425e06db054f0a1598af9c1b", +  "15270eb01e0c26bb8b767c3d80cae1be45b3fc4be7332994eda130186239ee44", -  "1951f7946b7aee80946e4d3c8b8dcf687519b7ffa65d3482328481703bba0c27", +  "1951f7946b7aee80946e4d3c8b8dcf687519b7ffa65d3482328481703bba0c27", -  "34d6b431bcf55c9904d9e66abfff686bc62f03fd7a7f1e9a36179f313adbac34", +  "34d6b431bcf55c9904d9e66abfff686bc62f03fd7a7f1e9a36179f313adbac34", -  "5270eb01e0c26bb8b767c3d80cae1be45b3fc4be7332994eda130186239ee44", +  "583e17ba3eb7ac604a1f4d5aaf0c751c8b66f7cf1dc17e2e8378897474f35c76", -  "6f6655bf5e921d847b36948a052f971bb8a1a4e32fa2331151f6e4e16158b1dd", +  "6f6655bf5e921d847b36948a052f971bb8a1a4e32fa2331151f6e4e16158b1dd", -  "9292399bccc56911efdba71a9ba1ed6d029a4df9ef1aa4009b1e6138a2b7aec7", +  "9292399bccc56911efdba71a9ba1ed6d029a4df9ef1aa4009b1e6138a2b7aec7", +  "951f7946b7aee80946e4d3c8b8dcf687519b7ffa65d3482328481703bba0c27", -  Annotations: map[string]string{ +  Annotations: map[string]string{ +  APIVersion: "", -  APIVersion: "apps/v1", -  APIVersion: "apps/v1", -  APIVersion: "v1", -  Args: []string{"logrotate"}, +  AvailableReplicas: 0, -  AvailableReplicas: 2, -  AvailableReplicas: 3, -  CollisionCount: &0, +  CollisionCount: nil, +  CreationTimestamp: v1.Time{}, -  CreationTimestamp: v1.Time{Time: s"2025-11-27 11:55:16 +0000 UTC"}, -  CreationTimestamp: v1.Time{Time: s"2025-11-27 12:11:49 +0000 UTC"}, +  CurrentReplicas: 0, -  CurrentReplicas: 2, -  CurrentReplicas: 3, +  CurrentRevision: "", -  CurrentRevision: "some-name-haproxy-7ffc4b8b8f", -  CurrentRevision: "some-name-haproxy-bb499bdb8", -  CurrentRevision: "some-name-proxysql-58fcfd77f8", -  CurrentRevision: "some-name-proxysql-5d47d47f59", -  CurrentRevision: 
"some-name-proxysql-5db9778574", -  CurrentRevision: "some-name-proxysql-67cd4bf8d6", -  CurrentRevision: "some-name-proxysql-769b66cc5c", -  CurrentRevision: "some-name-proxysql-7cc67559bd", -  CurrentRevision: "some-name-pxc-6c745d9549", -  CurrentRevision: "some-name-pxc-7d8f58d756", -  CurrentRevision: "some-name-pxc-b876d6f7c", -  CurrentRevision: "some-name-pxc-d49895985", -  DefaultMode: &420, -  DefaultMode: &420, +  DefaultMode: nil, +  DefaultMode: nil, +  DeprecatedServiceAccount: "", -  DeprecatedServiceAccount: "default", +  DNSPolicy: "", -  DNSPolicy: "ClusterFirst", -  "eba17d0621879453ca8c7af89de701155d52156792a65575b2eb42953bdc3c1f", -  EnvFrom: []v1.EnvFromSource{{SecretRef: s"&SecretEnvSource{LocalObjectRefe"...}}, -  Env: []v1.EnvVar{ -  FieldsType: "FieldsV1", -  FieldsType: "FieldsV1", -  FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`..., -  FieldsV1: s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`..., +  Generation: 0, -  Generation: 1, -  Generation: 2, -  Generation: 3, -  Generation: 4, -  Generation: 5, -  Generation: 6, -  Generation: 7, -  Generation: 8, -  Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", -  Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", -  ImagePullPolicy: "Always", +  "last-applied-secret": "100ff677e872fdd492f227bff6367143c551f9bc425e06db054f0a1598af9c1b", +  "last-applied-secret": "1951f7946b7aee80946e4d3c8b8dcf687519b7ffa65d3482328481703bba0c27", +  "last-applied-secret": "eba17d0621879453ca8c7af89de701155d52156792a65575b2eb42953bdc3c1f", +  ManagedFields: nil, -  ManagedFields: []v1.ManagedFieldsEntry{ -  Manager: "kube-controller-manager", -  Manager: "percona-xtradb-cluster-operator", -  {Name: "IS_LOGCOLLECTOR", Value: "yes"}, -  {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"}, -  Name: "logrotate", -  Name: "logs", -  {Name: "MONITOR_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, -  {Name: "POD_NAMESPASE", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, -  {Name: "POD_NAME", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, -  {Name: "SERVICE_TYPE", Value: "mysql"}, +  ObservedGeneration: 0, -  ObservedGeneration: 1, -  ObservedGeneration: 2, -  ObservedGeneration: 3, -  ObservedGeneration: 4, -  ObservedGeneration: 5, -  ObservedGeneration: 6, -  ObservedGeneration: 7, -  ObservedGeneration: 8, -  Operation: "Update", -  Operation: "Update", -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMTAwZmY2NzdlODcyZmRkNDkyZjIyN2JmZjYzNjcxNDNjNTUxZjliYzQyNWUwNmRiMDU0ZjBhMTU5OGFmOWMxYiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMTk1MWY3OTQ2YjdhZWU4MDk0NmU0ZDNjOGI4ZGNmNjg3NTE5YjdmZmE2NWQzNDgyMzI4NDgxNzAzYmJhMGMyNyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMTk1MWY3OTQ2YjdhZWU4MDk0NmU0ZDNjOGI4ZGNmNjg3NTE5YjdmZmE2NWQzNDgyMzI4NDgxNzAzYmJhMGMyNyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMTUyNzBlYjAxZTBjMjZiYjhiNzY3YzNkODBjYWUxYmU0NWIzZmM0YmU3MzMyOTk0ZWRhMTMwMTg2MjM5ZWU0NCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMzRkNmI0MzFiY2Y1NWM5OTA0ZDllNjZhYmZmZjY4NmJjNjJmMDNmZDdhN2YxZTlhMzYxNzlmMzEzYWRiYWMzNCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMzRkNmI0MzFiY2Y1NWM5OTA0ZDllNjZhYmZmZjY4NmJjNjJmMDNmZDdhN2YxZTlhMzYxNzlmMzEzYWRiYWMzNCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNmY2NjU1YmY1ZTkyMWQ4NDdiMzY5NDhhMDUyZjk3MWJiOGExYTRlMzJmYTIzMzExNTFmNmU0ZTE2MTU4YjFkZCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNmY2NjU1YmY1ZTkyMWQ4NDdiMzY5NDhhMDUyZjk3MWJiOGExYTRlMzJmYTIzMzExNTFmNmU0ZTE2MTU4YjFkZCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOTI5MjM5OWJjY2M1NjkxMWVmZGJhNzFhOWJhMWVkNmQwMjlhNGRmOWVmMWFhNDAwOWIxZTYxMzhhMmI3YWVjNyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOTI5MjM5OWJjY2M1NjkxMWVmZGJhNzFhOWJhMWVkNmQwMjlhNGRmOWVmMWFhNDAwOWIxZTYxMzhhMmI3YWVjNyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6ImhhcHJveHkiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMTk1MWY3OTQ2YjdhZWU4MDk0NmU0ZDNjOGI4ZGNmNjg3NTE5YjdmZmE2NWQzNDgyMzI4NDgxNzAzYmJhMGMyNyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6ImhhcHJveHkiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMTk1MWY3OTQ2YjdhZWU4MDk0NmU0ZDNjOGI4ZGNmNjg3NTE5YjdmZmE2NWQzNDgyMzI4NDgxNzAzYmJhMGMyNyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6ImhhcHJveHkiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNTgzZTE3YmEzZWI3YWM2MDRhMWY0ZDVhYWYwYzc1MWM4YjY2ZjdjZjFkYzE3ZTJlODM3ODg5NzQ3NGYzNWM3NiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6ImhhcHJveHkiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSJ9fSwic3BlYyI6eyJ2b2x1bWVzIjpbeyJuYW1lIjoiaGFwcm94eS1jdXN0b20iLCJjb25maWdNYXAi"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMTk1MWY3OTQ2YjdhZWU4MDk0NmU0ZDNjOGI4ZGNmNjg3NTE5YjdmZmE2NWQzNDgyMzI4NDgxNzAzYmJhMGMyNyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMTUyNzBlYjAxZTBjMjZiYjhiNzY3YzNkODBjYWUxYmU0NWIzZmM0YmU3MzMyOTk0ZWRhMTMwMTg2MjM5ZWU0NCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMTUyNzBlYjAxZTBjMjZiYjhiNzY3YzNkODBjYWUxYmU0NWIzZmM0YmU3MzMyOTk0ZWRhMTMwMTg2MjM5ZWU0NCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMTUyNzBlYjAxZTBjMjZiYjhiNzY3YzNkODBjYWUxYmU0NWIzZmM0YmU3MzMyOTk0ZWRhMTMwMTg2MjM5ZWU0NCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMTkzLTFlYjM3YjIwIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJsb2dzIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1sb2djb2xsZWN0b3IiLCJlbnZGcm9tIjpbeyJzZWNyZXRSZWYiOnsibmFtZSI6InNvbWUtbmFtZS1sb2ctY29sbGVjdG9yIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IkxPR19EQVRBX0RJUiIsInZhbHVlIjoiL3Zhci9saWIvbXlzcWwifSx7Im5hbWUiOiJQT0RfTkFNRVNQQVNFIiwidmFsdWVGcm9tIjp7ImZpZWxkUmVmIjp7ImZpZWxkUGF0aCI6Im1ldGFkYXRhLm5hbWVzcGFjZSJ9fX0seyJuYW1lIjoiUE9EX05BTUUiLCJ2YWx1ZUZyb20iOnsiZmllbGRSZWYiOnsiZmllbGRQYXRoIjoibWV0YWRhdGEubmFtZSJ9fX1dLCJyZXNvdXJjZXMiOnt9LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJkYXRhZGlyIiwibW91bnRQYXRoIjoiL3Zhci9saWIvbXlzcWwifV0sImltYWdlUHVsbFBvbGljeSI6IkFsd2F5cyJ9LHsibmFtZSI6ImxvZ3JvdGF0ZSIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOm1haW4tbG9nY29sbGVjdG9yIiwiYXJncyI6WyJsb2dyb3RhdGUiXSwiZW52IjpbeyJuYW1lIjoiU0VSVklDRV9UWVBFIiwidmFsdWUiOiJteXNxbCJ9LHsibmFtZSI6Ik1PTklUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJtb25pdG9yIn19fV0sInJlc291cmNlcyI6e30sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImRhdGFkaXIiLCJtb3VudF
BhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHhjIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1weGM1LjciLCJjb21tYW5kIjpbIi92YXIvbGliL215c3FsL3B4Yy1lbnRyeXBvaW50LnNoIl0sImFyZ3MiOlsibXlzcWxkIl0sInBvcnRzIjpbeyJuYW1lIjoibXlzcWwiLCJjb250YWluZXJQb3J0IjozMzA2fSx7Im5hbWUiOiJzc3QiLCJjb250YWluZXJQb3J0Ijo0NDQ0fSx7Im5hbWUiOiJ3cml0ZS1zZXQiLCJjb250YWluZXJQb3J0Ijo0NTY3fSx7Im5hbWUiOiJpc3QiLCJjb250YWluZXJQb3J0Ijo0NTY4fSx7Im5hbWUiOiJteXNxbC1hZG1pbiIsImNvbnRhaW5lclBvcnQiOjMzMDYyfSx7Im5h"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMTUyNzBlYjAxZTBjMjZiYjhiNzY3YzNkODBjYWUxYmU0NWIzZmM0YmU3MzMyOTk0ZWRhMTMwMTg2MjM5ZWU0NCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMTkzLTFlYjM3YjIwIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXB4YzUuNyIsImNvbW1hbmQiOlsiL3Zhci9saWIvbXlzcWwvcHhjLWVudHJ5cG9pbnQuc2giXSwiYXJncyI6WyJteXNxbGQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InNzdCIsImNvbnRhaW5lclBvcnQiOjQ0NDR9LHsibmFtZSI6IndyaXRlLXNldCIsImNvbnRhaW5lclBvcnQiOjQ1Njd9LHsibmFtZSI6ImlzdCIsImNvbnRhaW5lclBvcnQiOjQ1Njh9LHsibmFtZSI6Im15c3FsLWFkbWluIiwiY29udGFpbmVyUG9ydCI6MzMwNjJ9LHsibmFtZSI6Im15c3FseCIsImN
vbnRhaW5lclBvcnQiOjMzMDYwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXB4YyIsIm9wdGlvbmFsIjp0cnVlfX1dLCJlbnYiOlt7Im5hbWUiOiJQWENfU0VSVklDRSIsInZhbHVlIjoic29tZS1uYW1lLXB4Yy11bnJlYWR5In0seyJuYW1lIjoiTU9OSVRPUl9IT1NUIiwidmFsdWUiOiIlIn0seyJuYW1lIjoiTVlTUUxfUk9PVF9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6InJvb3QifX19LHsibmFtZSI6IlhUUkFCQUNLVVBfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJ4dHJhYmFja3VwIn19fSx7Im5hbWUiOiJNT05JVE9SX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoibW9uaXRvciJ9fX0seyJuYW1lIjoiQ0xVU1RFUl9IQVNIIiwidmFsdWUiOiI2MTM5MTMyIn0seyJuYW1lIjoiT1BFUkFUT1JfQURNSU5fUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiTElWRU5FU1NfQ0hFQ0tfVElNRU9VVCIsInZhbHVlIjoiNSJ9LHsibmFtZSI6IlJFQURJTkVTU19DSEVDS19USU1FT1VUIiwidmFsdWUiOiIxNSJ9LHsibmFtZSI6IkRFRkFVTFRfQVVUSEVOVElDQVRJT05fUExVR0lOIiwidmFsdWUiOiJjYWNoaW5nX3NoYTJfcGFzc3dvcmQifSx7Im5hbWUiOiJNWVNR"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNmY2NjU1YmY1ZTkyMWQ4NDdiMzY5NDhhMDUyZjk3MWJiOGExYTRlMzJmYTIzMzExNTFmNmU0ZTE2MTU4YjFkZCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNmY2NjU1YmY1ZTkyMWQ4NDdiMzY5NDhhMDUyZjk3MWJiOGExYTRlMzJmYTIzMzExNTFmNmU0ZTE2MTU4YjFkZCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZWJhMTdkMDYyMTg3OTQ1M2NhOGM3YWY4OWRlNzAxMTU1ZDUyMTU2NzkyYTY1NTc1YjJlYjQyOTUzYmRjM2MxZiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZWJhMTdkMDYyMTg3OTQ1M2NhOGM3YWY4OWRlNzAxMTU1ZDUyMTU2NzkyYTY1NTc1YjJlYjQyOTUzYmRjM2MxZiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNv"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMTAwZmY2NzdlODcyZmRkNDkyZjIyN2JmZjYzNjcxNDNjNTUxZjliYzQyNWUwNmRiMDU0ZjBhMTU5OGFmOWMxYiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMw"..., +  PeriodSeconds: 0, -  PeriodSeconds: 10, +  PersistentVolumeClaimRetentionPolicy: nil, -  PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", +  Phase: "", -  Phase: "Pending", +  PodManagementPolicy: "", -  PodManagementPolicy: "OrderedReady", +  Protocol: "", -  Protocol: "TCP", +  ReadyReplicas: 0, -  ReadyReplicas: 2, -  ReadyReplicas: 3, +  Replicas: 0, -  Replicas: 2, -  Replicas: &2, +  Replicas: &2, -  Replicas: 3, -  Replicas: &3, +  Replicas: &3, +  ResourceVersion: "", -  ResourceVersion: "1764244549784639009", -  ResourceVersion: "1764244738991935000", -  ResourceVersion: "1764244883731615009", -  ResourceVersion: "1764244918121407009", -  ResourceVersion: "1764245043529535009", -  ResourceVersion: "1764245080812063000", -  ResourceVersion: "1764245131395007009", -  ResourceVersion: "1764245181449151009", -  ResourceVersion: "1764245253652463009", -  ResourceVersion: "1764245358809391000", -  ResourceVersion: "1764245396838511009", -  ResourceVersion: "1764245497957423000", -  ResourceVersion: "1764245571409599017", -  ResourceVersion: 
"1764245669567295000", -  ResourceVersion: "1764245737178687017", +  RestartPolicy: "", -  RestartPolicy: "Always", -  RevisionHistoryLimit: &10, +  RevisionHistoryLimit: nil, +  SchedulerName: "", +  SchedulerName: "", -  SchedulerName: "default-scheduler", -  SchedulerName: "default-scheduler", +  SecurityContext: nil, -  SecurityContext: s"&PodSecurityContext{SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,SupplementalGroups:[],FSGroup:nil,RunAsGroup:nil,Sysctls:[]Sysctl{},WindowsOptions:nil,FSGroupChangePolicy:nil,SeccompProfile:nil,AppArmorProfile:nil,SupplementalGroupsPolicy:nil,SELinux"..., -  Subresource: "status", +  TerminationMessagePath: "", -  TerminationMessagePath: "/dev/termination-log", +  TerminationMessagePolicy: "", -  TerminationMessagePolicy: "File", -  Time: s"2025-11-27 11:55:16 +0000 UTC", -  Time: s"2025-11-27 11:55:49 +0000 UTC", -  Time: s"2025-11-27 11:58:58 +0000 UTC", -  Time: s"2025-11-27 12:01:03 +0000 UTC", -  Time: s"2025-11-27 12:01:23 +0000 UTC", -  Time: s"2025-11-27 12:01:34 +0000 UTC", -  Time: s"2025-11-27 12:01:58 +0000 UTC", -  Time: s"2025-11-27 12:02:08 +0000 UTC", -  Time: s"2025-11-27 12:02:18 +0000 UTC", -  Time: s"2025-11-27 12:04:03 +0000 UTC", -  Time: s"2025-11-27 12:04:40 +0000 UTC", -  Time: s"2025-11-27 12:04:54 +0000 UTC", -  Time: s"2025-11-27 12:05:31 +0000 UTC", -  Time: s"2025-11-27 12:05:54 +0000 UTC", -  Time: s"2025-11-27 12:06:21 +0000 UTC", -  Time: s"2025-11-27 12:07:01 +0000 UTC", -  Time: s"2025-11-27 12:07:33 +0000 UTC", -  Time: s"2025-11-27 12:09:18 +0000 UTC", -  Time: s"2025-11-27 12:09:30 +0000 UTC", -  Time: s"2025-11-27 12:09:56 +0000 UTC", -  Time: s"2025-11-27 12:11:23 +0000 UTC", -  Time: s"2025-11-27 12:11:37 +0000 UTC", -  Time: s"2025-11-27 12:11:49 +0000 UTC", -  Time: s"2025-11-27 12:12:51 +0000 UTC", -  Time: s"2025-11-27 12:14:29 +0000 UTC", -  Time: s"2025-11-27 12:14:31 +0000 UTC", -  Time: s"2025-11-27 12:15:37 +0000 UTC", -  TopologySpreadConstraints: nil, +  TopologySpreadConstraints: []v1.TopologySpreadConstraint{}, +  UID: "", -  UID: "1ff7a429-6583-437d-9994-06a7da12cf64", -  UID: "ba1c560f-b19d-492a-9836-6cdc7206042b", -  UID: "d7a14d36-355b-4b95-834a-d32bd3d804ae", +  UpdatedReplicas: 0, -  UpdatedReplicas: 1, -  UpdatedReplicas: 2, -  UpdatedReplicas: 3, +  UpdateRevision: "", -  UpdateRevision: "some-name-haproxy-7ffc4b8b8f", -  UpdateRevision: "some-name-haproxy-bb499bdb8", -  UpdateRevision: "some-name-proxysql-58fcfd77f8", -  UpdateRevision: "some-name-proxysql-5d47d47f59", -  UpdateRevision: "some-name-proxysql-5db9778574", -  UpdateRevision: "some-name-proxysql-67cd4bf8d6", -  UpdateRevision: "some-name-proxysql-769b66cc5c", -  UpdateRevision: "some-name-proxysql-7cc67559bd", -  UpdateRevision: "some-name-pxc-6c745d9549", -  UpdateRevision: "some-name-pxc-7d8f58d756", -  UpdateRevision: "some-name-pxc-84895d69d4", -  UpdateRevision: "some-name-pxc-b876d6f7c", -  UpdateRevision: "some-name-pxc-d49895985", -  VolumeMode: &"Filesystem", +  VolumeMode: nil, -  VolumeMounts: []v1.VolumeMount{{Name: "datadir", MountPath: "/var/lib/mysql"}},   }    },    },    {    },    },    {    },    }, ""),    },    {    },    },    },    "1",    ... // 16 identical fields    ... // 16 identical fields    ... // 22 identical fields    ... // 2 identical fields    ... // 2 identical fields    ... // 2 identical fields    ... // 2 identical fields    ... // 3 identical elements    ... // 3 identical fields    ... // 3 identical fields    ... // 3 identical fields    ... // 4 identical fields    ... 
// 5 identical elements    ... // 5 identical fields    ... // 5 identical fields    ... // 5 identical fields    ... // 6 identical fields    ... // 6 identical fields    ... // 7 identical fields    ... // 8 identical fields    ... // 9 identical fields    ... // 9 identical fields    AccessModes: nil,    ActiveDeadlineSeconds: nil,    Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "haproxy", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},    Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},    Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},    Annotations: map[string]string{    Args: {"haproxy"},    Args: {"mysqld"},    Args: {"proxysql", "-f", "-c", "/etc/proxysql/proxysql.cnf", ...},    AutomountServiceAccountToken: nil,    AWSElasticBlockStore: nil,    AzureFile: nil,    Capacity: nil,    Conditions: nil,    ConfigMapKeyRef: nil,    ConfigMap: &v1.ConfigMapVolumeSource{    ContainerPort: 3306,    ContainerPort: 33060,    ContainerPort: 33062,    ContainerPort: 3307,    ContainerPort: 3309,    ContainerPort: 4444,    ContainerPort: 4567,    ContainerPort: 4568,    ContainerPort: 6032,    ContainerPort: 6070,    ContainerPort: 8404,    Containers: []v1.Container{    DataSource: nil,    DataSourceRef: nil,    DeletionGracePeriodSeconds: nil,    DeletionGracePeriodSeconds: nil,    DeletionTimestamp: nil,    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-haproxy"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-proxysql"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-pxc"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-log-collector"}, Optional: &true}}},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "LIVENESS_CHECK_TIMEOUT", Value: "5"}, {Name: "READINESS_CHECK_TIMEOUT", Value: "1"}},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "OPERATOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "proxyadmin"}}}, ...},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, ...},    Env: []v1.EnvVar{    
EphemeralContainers: nil,    FailureThreshold: 3,    FC: nil,    FieldPath: "metadata.name",    FieldPath: "metadata.namespace",    FieldRef: &v1.ObjectFieldSelector{    Finalizers: nil,    Finalizers: nil,    GitRepo: nil,    HostAliases: nil,    HostAliases: nil,    HostIP: "",    HostIPC: false,    Hostname: "",    HostPort: 0,    ImagePullPolicy: "Always",    ImagePullSecrets: nil,    InitContainers: []v1.Container{    InitialDelaySeconds: 300,    ISCSI: nil,    Items: nil,    Items: nil,    "kubectl.kubernetes.io/default-container": "haproxy",    "kubectl.kubernetes.io/default-container": "proxysql",    "kubectl.kubernetes.io/default-container": "pxc",    Labels: {"app.kubernetes.io/component": "haproxy", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},    Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},    Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},    Labels: nil,    "last-applied-secret": strings.Join({    Lifecycle: nil,    LivenessProbe: &v1.Probe{    LocalObjectReference: {Name: "auto-some-name-pxc"},    LocalObjectReference: {Name: "some-name-haproxy"},    LocalObjectReference: {Name: "some-name-pxc"},    ManagedFields: nil,    MinReadySeconds: 0,    Name: "auto-config",    {Name: "bin", VolumeSource: {EmptyDir: &{}}},    {Name: "CLUSTER_HASH", Value: "6139132"},    Name: "config",    {Name: "haproxy-auto", VolumeSource: {EmptyDir: &{}}},    Name: "haproxy-custom",    Name: "ist",    {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"},    {Name: "MONITOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "monitor"}}},    Name: "mysql",    Name: "mysql-admin",    Name: "mysql-init-file",    Name: "mysql-replicas",    Name: "mysql-users-secret-file",    Name: "mysqlx",    {Name: "OPERATOR_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}},    Name: "POD_NAME",    Name: "POD_NAMESPASE",    Name: "proxyadm",    Name: "proxy-protocol",    Name: "some-name-env-vars-haproxy",    Namespace: "users-22226",    Name: "ssl",    Name: "ssl-internal",    Name: "sst",    Name: "stats",    {Name: "tmp", VolumeSource: {EmptyDir: &{}}},    Name: "vault-keyring-secret",    Name: "write-set",    {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}},    NFS: nil,    NodeName: "",    NodeSelector: nil,    ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "last-applied-secret": "100ff677e872fdd492f227bff6367143c551f9bc425e06db054f0a1598af9c1b", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}},    ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": 
"percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}},    ObjectMeta: {Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "pxc", "last-applied-secret": "15270eb01e0c26bb8b767c3d80cae1be45b3fc4be7332994eda130186239ee44", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}},    ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: {Name: "proxydata", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: v1.ObjectMeta{    ObjectMeta: v1.ObjectMeta{    Optional: &false,    Optional: &true,    Optional: &true,    Ordinals: nil,    OS: nil,    Overhead: nil,    OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name", UID: "14295563-f59f-47e7-8d84-a14c4f5d992a", ...}},    OwnerReferences: nil,    "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e",    "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e",    "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a",    "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a",    "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc",    "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc",    Ports: nil,    Ports: []v1.ContainerPort{    PreemptionPolicy: nil,    ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}},    Quobyte: nil,    ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...},    Replicas: &2,    Replicas: &3,    ResizePolicy: nil,    ResourceFieldRef: nil,    Resources: {},    Resources: {Limits: {s"cpu": {i: {...}, s: "1", Format: "DecimalSI"}, s"memory": {i: {...}, s: "2G", Format: "DecimalSI"}}, Requests: {s"cpu": {i: {...}, s: "100m", Format: "DecimalSI"}, s"memory": {i: {...}, Format: "DecimalSI"}}},    SecretName: "internal-some-name",    SecretName: "some-name-env-vars-haproxy",    SecretName: "some-name-mysql-init",    SecretName: "some-name-ssl",    SecretName: "some-name-ssl-internal",    SecretName: "some-name-vault",    Secret: &v1.SecretVolumeSource{    SecurityContext: nil,    Selector: &{MatchLabels: {"app.kubernetes.io/component": "haproxy", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", 
"app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    SelfLink: "",    ServiceAccountName: "default",    ServiceName: "some-name-haproxy",    ServiceName: "some-name-proxysql-unready",    ServiceName: "some-name-pxc",    SetHostnameAsFQDN: nil,    ShareProcessNamespace: nil,    Spec: v1.PersistentVolumeClaimSpec{    Spec: v1.PodSpec{    Spec: v1.StatefulSetSpec{    StartupProbe: nil,    Status: v1.PersistentVolumeClaimStatus{    Status: v1.StatefulSetStatus{    StorageClassName: nil,    Subdomain: "",    Subdomain: "",    SuccessThreshold: 1,    Template: v1.PodTemplateSpec{    TerminationGracePeriodSeconds: &30,    TerminationGracePeriodSeconds: &600,    TerminationGracePeriodSeconds: nil,    TimeoutSeconds: 5,    Tolerations: {{Key: "node.alpha.kubernetes.io/unreachable", Operator: "Exists", Effect: "NoExecute", TolerationSeconds: &6000}},    Tolerations: nil,    TypeMeta: {},    TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"},    UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}},   &v1.StatefulSet{    Value: "",    ValueFrom: &v1.EnvVarSource{    VolumeAttributesClassName: nil,    VolumeClaimTemplates: nil,    VolumeClaimTemplates: []v1.PersistentVolumeClaim{    VolumeDevices: nil,    VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...},    VolumeName: "",    VolumeSource: v1.VolumeSource{    Volumes: []v1.Volume{    VsphereVolume: nil,    WorkingDir: "", + grep -v NAMESPACE + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-22226 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.IacAj267yG ++ mktemp + local LAST_ERR=/tmp/tmp.hMmetouMyw + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.IacAj267yG perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-22226 namespace + cat /tmp/tmp.hMmetouMyw + rm /tmp/tmp.IacAj267yG /tmp/tmp.hMmetouMyw + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.SQKanV2ZIp ++ mktemp + local LAST_ERR=/tmp/tmp.nXQp6OBL33 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SQKanV2ZIp No resources found + cat /tmp/tmp.nXQp6OBL33 + rm /tmp/tmp.SQKanV2ZIp /tmp/tmp.nXQp6OBL33 + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.sG2WrP6FOJ ++ mktemp + local LAST_ERR=/tmp/tmp.M7hVWFhOEg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.sG2WrP6FOJ No resources found + cat /tmp/tmp.M7hVWFhOEg + rm /tmp/tmp.sG2WrP6FOJ /tmp/tmp.M7hVWFhOEg + return 0 + kubectl_bin delete 
ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.dWkMOlXdTv ++ mktemp + local LAST_ERR=/tmp/tmp.Z96ljSUFuA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dWkMOlXdTv validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.Z96ljSUFuA + rm /tmp/tmp.dWkMOlXdTv /tmp/tmp.Z96ljSUFuA + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-22226 + rm -rf /tmp/tmp.OGJBrKbY7x + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.IKF7p0N1cU + desc 'test passed' + local LAST_OUT=/tmp/tmp.uI8Z6AMrgQ + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.MlunJ7A7qH + local exit_status=0 + local LAST_ERR=/tmp/tmp.jFtbwSz2Nj + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-22226 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator
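----------------------------------------------------------------------------------- notes on the statefulset diff above -----------------------------------------------------------------------------------
The large diff block earlier in this section is Go cmp.Diff-style output whose lines the log pipeline has sorted alphabetically: "-" entries come from the StatefulSet as it existed before a password rotation, "+" entries from the object the operator regenerates, and unprefixed lines are unchanged context. Server-populated fields (ResourceVersion, UID, Time, UpdateRevision, ReadyReplicas and the other status counters) show up empty on the "+" side simply because the freshly built object has not been persisted yet. The percona.com/last-config-hash values are base64-encoded JSON snapshots of the StatefulSet spec (they decode to {"replicas":...,"selector":...}); they churn on every rotation because the embedded last-applied-secret hash changes. A minimal sketch for inspecting that annotation on a live cluster, assuming jq is available; the namespace and StatefulSet names are taken from this run:
# decode the operator's config snapshot and pull out the embedded secret hash
kubectl -n users-22226 get sts some-name-proxysql -o json \
  | jq -r '.metadata.annotations["percona.com/last-config-hash"]' \
  | base64 -d \
  | jq -r '.template.metadata.annotations["last-applied-secret"]'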
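Every kubectl_bin call in this log expands to the same trace: two mktemp files, a three-attempt loop over seq 0 2 with set +e/set -e bracketing the real kubectl invocation, a break once exit_status is 0, then cat and rm of the captured stdout/stderr. A reconstruction of that helper, inferred from the trace alone; the real definition lives in the test framework's shared functions, and the back-off between failed attempts is an assumption since only the success path is traced here:
kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"   # the wrapped command
        exit_status=$?
        set -e
        if [ "$exit_status" != 0 ]; then
            sleep 4   # assumed back-off; the trace never reaches this branch
            continue
        fi
        break
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR" >&2
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}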
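The finalizer patch that opens each cleanup pass is what keeps the bulk delete from hanging: a PerconaXtraDBCluster that still carries finalizers (for example delete-pxc-pods-in-order) can sit in Terminating and stall "kubectl delete pxc --all", so the script empties metadata.finalizers first. The xargs invocation feeds the first two columns of every non-header line (namespace, then name) into $0 and $1 of the inline shell; extra columns from -o wide are passed along but ignored:
# one merge-patch per custom resource; grep -v NAMESPACE drops the header row
kubectl get pxc --all-namespaces -o wide \
    | grep -v NAMESPACE \
    | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'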
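The teardown at the end is deliberately failure-tolerant: the bare "+ :" traced right after the cert-manager delete is the no-op branch of an "|| :" guard, so a cluster where cert-manager was never installed does not fail the run, and the final namespace deletes use --grace-period=0 --force=true so the job does not block on graceful pod termination. Condensed, the tail of the script behaves like this sketch (URL and namespace names as in the trace):
# ignore a failed delete if cert-manager is absent
kubectl delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml || :
# force-remove the test and operator namespaces without waiting
kubectl delete --grace-period=0 --force=true namespace users-22226
kubectl delete --grace-period=0 --force=true namespace pxc-operator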