Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/logs/users-5-7.log Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 + create_infra users-16376 + local ns=users-16376 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-12729 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.1d10ePHMje ++ mktemp + local LAST_ERR=/tmp/tmp.6ARpim3aO9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1d10ePHMje perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-12729 namespace + cat /tmp/tmp.6ARpim3aO9 + rm /tmp/tmp.1d10ePHMje /tmp/tmp.6ARpim3aO9 + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.hr1B6JyCiS ++ mktemp + local LAST_ERR=/tmp/tmp.g04M9TbAp6 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hr1B6JyCiS No resources found + cat /tmp/tmp.g04M9TbAp6 + rm /tmp/tmp.hr1B6JyCiS /tmp/tmp.g04M9TbAp6 + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.HMhQK2dcIr ++ mktemp + local LAST_ERR=/tmp/tmp.VNaVodc2wK + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HMhQK2dcIr No resources found + cat /tmp/tmp.VNaVodc2wK + rm /tmp/tmp.HMhQK2dcIr /tmp/tmp.VNaVodc2wK + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ awk '-F ' '{print $2}' ++ tail -n1 ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were 
provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + kubectl_bin get ns ++ mktemp ++ mktemp + awk '{print$1}' + local LAST_OUT=/tmp/tmp.0qRhSNZhYu + local LAST_OUT=/tmp/tmp.ZC1j7NC5QG ++ mktemp + local LAST_ERR=/tmp/tmp.aWLwzFmSL2 + local exit_status=0 ++ mktemp + local LAST_ERR=/tmp/tmp.JSUJxsWiek + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0qRhSNZhYu + cat /tmp/tmp.aWLwzFmSL2 + rm /tmp/tmp.0qRhSNZhYu /tmp/tmp.aWLwzFmSL2 + return 0 namespace "users-12729" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ZC1j7NC5QG namespace "pxc-operator" deleted + cat /tmp/tmp.JSUJxsWiek + rm /tmp/tmp.ZC1j7NC5QG /tmp/tmp.JSUJxsWiek + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.XM2w11l2iV ++ mktemp + local LAST_ERR=/tmp/tmp.GfN9JnQmU1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XM2w11l2iV namespace/pxc-operator created + cat /tmp/tmp.GfN9JnQmU1 + rm /tmp/tmp.XM2w11l2iV /tmp/tmp.GfN9JnQmU1 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.PAaNJyyKP7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.9GleWn6G5E ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PAaNJyyKP7 ++ cat /tmp/tmp.9GleWn6G5E ++ rm /tmp/tmp.PAaNJyyKP7 /tmp/tmp.9GleWn6G5E ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2260-a2c7ae05-5-cluster1 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.sXNULMWX3Y ++ mktemp + local LAST_ERR=/tmp/tmp.8fMbZyokFd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2260-a2c7ae05-5-cluster1 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.sXNULMWX3Y Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2260-a2c7ae05-5-cluster1" modified. 
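
The create_infra cleanup traced above first clears finalizers on every PerconaXtraDBCluster resource so that the subsequent delete cannot hang waiting on an operator that may no longer be reconciling them. A minimal sketch of that pattern, reconstructed directly from the trace (the real logic lives in the e2e-tests helper functions):

# list every pxc resource across namespaces, skip the header row,
# and clear each one's finalizers with a merge patch; an empty
# finalizers list lets the API server garbage-collect immediately
kubectl get pxc --all-namespaces -o wide \
    | grep -v NAMESPACE \
    | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
kubectl delete pxc --all --all-namespaces

With $0 bound to the namespace column and $1 to the resource name, each row from kubectl get becomes one patch invocation.
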
+ cat /tmp/tmp.8fMbZyokFd + rm /tmp/tmp.sXNULMWX3Y /tmp/tmp.8fMbZyokFd + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.SzxylI06m5 ++ mktemp + local LAST_ERR=/tmp/tmp.91tCZdvDhn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SzxylI06m5 customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.91tCZdvDhn + rm /tmp/tmp.SzxylI06m5 /tmp/tmp.91tCZdvDhn + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/deploy/cw-rbac.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.oAm2Wu7hcE ++ mktemp + local LAST_ERR=/tmp/tmp.NOrkLMytSH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.oAm2Wu7hcE clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.NOrkLMytSH + rm /tmp/tmp.oAm2Wu7hcE /tmp/tmp.NOrkLMytSH + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/deploy/cw-operator.yaml + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2260-a2c7ae05^' + kubectl_bin apply -f - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - ++ mktemp + local LAST_OUT=/tmp/tmp.eGygedRdDr ++ mktemp + local LAST_ERR=/tmp/tmp.QySJBwVAT7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.eGygedRdDr deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.QySJBwVAT7 + rm /tmp/tmp.eGygedRdDr /tmp/tmp.QySJBwVAT7 + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.d6EZJoII0z ++ mktemp + local LAST_ERR=/tmp/tmp.NylJspEGiV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.d6EZJoII0z pod/percona-xtradb-cluster-operator-6594d54b57-26m6j condition met + cat /tmp/tmp.NylJspEGiV + rm /tmp/tmp.d6EZJoII0z /tmp/tmp.NylJspEGiV + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.aC2mULwSLM +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZRLD8El6Xr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aC2mULwSLM ++ cat /tmp/tmp.ZRLD8El6Xr ++ rm /tmp/tmp.aC2mULwSLM /tmp/tmp.ZRLD8El6Xr ++ return 0 + wait_pod percona-xtradb-cluster-operator-6594d54b57-26m6j 480 pxc-operator + local pod=percona-xtradb-cluster-operator-6594d54b57-26m6j + local max_retry=480 + local ns=pxc-operator ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' ++ echo percona-xtradb-cluster-operator-6594d54b57-26m6j + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-6594d54b57-26m6j condition met waiting for pod/percona-xtradb-cluster-operator-6594d54b57-26m6j to become Ready.Ok + sleep 3 + create_namespace users-16376 + local namespace=users-16376 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + grep -E -v 
'^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces users-16376' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-16376 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-16376 + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.MKpfBLGoG0 + local LAST_OUT=/tmp/tmp.74ci163y0f ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.uVa5EZlNBU + local exit_status=0 + local LAST_ERR=/tmp/tmp.hyaYuHVHVE + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-16376 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-16376 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.74ci163y0f + cat /tmp/tmp.hyaYuHVHVE + rm /tmp/tmp.74ci163y0f /tmp/tmp.hyaYuHVHVE + return 0 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-16376 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.MKpfBLGoG0 + cat /tmp/tmp.uVa5EZlNBU Error from server (NotFound): namespaces "users-16376" not found + rm /tmp/tmp.MKpfBLGoG0 /tmp/tmp.uVa5EZlNBU + return 1 + : + wait_for_delete namespace/users-16376 + local res=namespace/users-16376 + echo -n 'waiting for namespace/users-16376 to be deleted' waiting for namespace/users-16376 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "users-16376" not found + desc 'create namespace users-16376' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-16376 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-16376 ++ mktemp + local LAST_OUT=/tmp/tmp.3PynaWerHW ++ mktemp + local LAST_ERR=/tmp/tmp.9FzZGlWNPI + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-16376 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3PynaWerHW namespace/users-16376 created + cat /tmp/tmp.9FzZGlWNPI + rm /tmp/tmp.3PynaWerHW /tmp/tmp.9FzZGlWNPI + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.eZLuedLAND +++ mktemp ++ local LAST_ERR=/tmp/tmp.ApsBstqCGR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eZLuedLAND ++ cat /tmp/tmp.ApsBstqCGR ++ rm /tmp/tmp.eZLuedLAND /tmp/tmp.ApsBstqCGR ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2260-a2c7ae05-5-cluster1 --namespace=users-16376 ++ mktemp + local LAST_OUT=/tmp/tmp.wSlBr2Xgv9 ++ mktemp + local LAST_ERR=/tmp/tmp.ycJoNJFwys + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2260-a2c7ae05-5-cluster1 --namespace=users-16376 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.wSlBr2Xgv9 Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2260-a2c7ae05-5-cluster1" modified. 
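
Nearly every command in this log runs through a kubectl_bin wrapper that retries up to three times, buffering stdout and stderr in mktemp files so they can be replayed afterwards. A simplified sketch reconstructed from the trace (the actual helper in the e2e-tests function library carries extra bookkeeping not shown here):

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e                                   # tolerate failure inside the loop
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ $exit_status != 0 ]; then
            sleep 0                              # the trace shows no real backoff between attempts
        else
            break                                # success: stop retrying
        fi
    done
    cat "$LAST_OUT"                              # replay captured output for the log
    cat "$LAST_ERR"
    rm "$LAST_OUT" "$LAST_ERR"
    return $exit_status
}

This is why transient failures, such as the "namespaces not found" error during namespace recreation above, appear three times before the wrapper gives up and returns 1.
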
+ cat /tmp/tmp.ycJoNJFwys + rm /tmp/tmp.wSlBr2Xgv9 /tmp/tmp.ycJoNJFwys + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.PqXC5npHxy ++ mktemp + local LAST_ERR=/tmp/tmp.l66BY95ZAF + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PqXC5npHxy secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.l66BY95ZAF + rm /tmp/tmp.PqXC5npHxy /tmp/tmp.l66BY95ZAF + return 0 + desc 'create PXC cluster with 1-password secret' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster with 1-password secret ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/conf/some-name.yml '' '' /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/conf/secrets_one_pass.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/conf/secrets_one_pass.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/conf/secrets_one_pass.yml ++ mktemp + local LAST_OUT=/tmp/tmp.qVEiVVYQEf ++ mktemp + local LAST_ERR=/tmp/tmp.H7MWFeG1Mq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/conf/secrets_one_pass.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qVEiVVYQEf secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.H7MWFeG1Mq + rm /tmp/tmp.qVEiVVYQEf /tmp/tmp.H7MWFeG1Mq + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/conf/client.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + 
/usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2260-a2c7ae05#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + local LAST_OUT=/tmp/tmp.MM4tIUyP23 + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-16376~ ++ mktemp + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + local LAST_ERR=/tmp/tmp.MR8GKEr4QM + local exit_status=0 + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.MM4tIUyP23 deployment.apps/pxc-client created + cat /tmp/tmp.MR8GKEr4QM + rm /tmp/tmp.MM4tIUyP23 /tmp/tmp.MR8GKEr4QM + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + local LAST_OUT=/tmp/tmp.ydcXceAHZ1 + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' ++ mktemp + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + local LAST_ERR=/tmp/tmp.CeE1FahOe0 + local exit_status=0 + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2260-a2c7ae05#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-16376~ + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ydcXceAHZ1 perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.CeE1FahOe0 + rm /tmp/tmp.ydcXceAHZ1 /tmp/tmp.CeE1FahOe0 + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ 
mktemp +++ local LAST_OUT=/tmp/tmp.1PLg5oz2Zv ++++ mktemp +++ local LAST_ERR=/tmp/tmp.Zx6PHhAwRn +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.1PLg5oz2Zv +++ cat /tmp/tmp.Zx6PHhAwRn +++ rm /tmp/tmp.1PLg5oz2Zv /tmp/tmp.Zx6PHhAwRn +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.MYjnCecD0B ++++ mktemp +++ local LAST_ERR=/tmp/tmp.2BiSlytJIb +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.MYjnCecD0B +++ cat /tmp/tmp.2BiSlytJIb +++ rm /tmp/tmp.MYjnCecD0B /tmp/tmp.2BiSlytJIb +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-16376 ++ mktemp + local LAST_OUT=/tmp/tmp.NM0UiOgXOz ++ mktemp + local LAST_ERR=/tmp/tmp.zlRBbGboCx + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-16376 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-16376 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-16376 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.NM0UiOgXOz + cat /tmp/tmp.zlRBbGboCx error: no matching resources found + rm /tmp/tmp.NM0UiOgXOz /tmp/tmp.zlRBbGboCx + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo 
some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.ibmFshGtb6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.siAIDDuZAO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ibmFshGtb6 ++ cat /tmp/tmp.siAIDDuZAO ++ rm /tmp/tmp.ibmFshGtb6 /tmp/tmp.siAIDDuZAO ++ return 0 + local 'root_pass=*M-xKEC=w#]-kwGz2' + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ grep -E -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.u5EkkqaYbw +++ mktemp ++ local LAST_ERR=/tmp/tmp.krES0UrPJG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.u5EkkqaYbw ++ cat /tmp/tmp.krES0UrPJG Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.u5EkkqaYbw /tmp/tmp.krES0UrPJG ++ return 0 + [[ -n '' ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''*M-xKEC=w#]-kwGz2'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -p'\''*M-xKEC=w#]-kwGz2'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uIJDqnZTtA +++ mktemp ++ local LAST_ERR=/tmp/tmp.aDYNeW5j78 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uIJDqnZTtA ++ cat /tmp/tmp.aDYNeW5j78 ++ rm /tmp/tmp.uIJDqnZTtA 
/tmp/tmp.aDYNeW5j78 ++ return 0 + client_pod=pxc-client-857d976497-9m679 + wait_pod pxc-client-857d976497-9m679 + local pod=pxc-client-857d976497-9m679 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-9m679 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-9m679 condition met waiting for pod/pxc-client-857d976497-9m679 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''*M-xKEC=w#]-kwGz2'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -p'\''*M-xKEC=w#]-kwGz2'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.miowW1Y6Vi +++ mktemp ++ local LAST_ERR=/tmp/tmp.dhCkdQ7IAo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.miowW1Y6Vi ++ cat /tmp/tmp.dhCkdQ7IAo ++ rm /tmp/tmp.miowW1Y6Vi /tmp/tmp.dhCkdQ7IAo ++ return 0 + client_pod=pxc-client-857d976497-9m679 + wait_pod pxc-client-857d976497-9m679 + local pod=pxc-client-857d976497-9m679 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-9m679 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-9m679 condition met waiting for pod/pxc-client-857d976497-9m679 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''*M-xKEC=w#]-kwGz2'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''*M-xKEC=w#]-kwGz2'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''*M-xKEC=w#]-kwGz2'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''*M-xKEC=w#]-kwGz2'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bRJPIf6kaF +++ mktemp ++ local LAST_ERR=/tmp/tmp.3uMM2c6yfB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bRJPIf6kaF ++ cat /tmp/tmp.3uMM2c6yfB ++ rm /tmp/tmp.bRJPIf6kaF /tmp/tmp.3uMM2c6yfB ++ return 0 + client_pod=pxc-client-857d976497-9m679 + wait_pod pxc-client-857d976497-9m679 + local pod=pxc-client-857d976497-9m679 + local max_retry=480 + local ns= ++ echo 
pxc-client-857d976497-9m679 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-9m679 condition met waiting for pod/pxc-client-857d976497-9m679 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.AxwkEOwxIb/select-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-1.sql /tmp/tmp.AxwkEOwxIb/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''*M-xKEC=w#]-kwGz2'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''*M-xKEC=w#]-kwGz2'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''*M-xKEC=w#]-kwGz2'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''*M-xKEC=w#]-kwGz2'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3Ns3ariPBn +++ mktemp ++ local LAST_ERR=/tmp/tmp.sQ3u3iP7RM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3Ns3ariPBn ++ cat /tmp/tmp.sQ3u3iP7RM ++ rm /tmp/tmp.3Ns3ariPBn /tmp/tmp.sQ3u3iP7RM ++ return 0 + client_pod=pxc-client-857d976497-9m679 + wait_pod pxc-client-857d976497-9m679 + local pod=pxc-client-857d976497-9m679 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-9m679 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-9m679 condition met waiting for pod/pxc-client-857d976497-9m679 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.AxwkEOwxIb/select-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-1.sql /tmp/tmp.AxwkEOwxIb/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''*M-xKEC=w#]-kwGz2'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''*M-xKEC=w#]-kwGz2'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''*M-xKEC=w#]-kwGz2'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''*M-xKEC=w#]-kwGz2'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AolKDsX0n3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.lL3yLrL6wg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AolKDsX0n3 ++ cat /tmp/tmp.lL3yLrL6wg ++ rm /tmp/tmp.AolKDsX0n3 /tmp/tmp.lL3yLrL6wg ++ return 0 + client_pod=pxc-client-857d976497-9m679 + wait_pod pxc-client-857d976497-9m679 + local pod=pxc-client-857d976497-9m679 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-9m679 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-9m679 condition met waiting for pod/pxc-client-857d976497-9m679 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.AxwkEOwxIb/select-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-1.sql /tmp/tmp.AxwkEOwxIb/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ grep -E -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.l2ytjGdxEk +++ mktemp ++ local LAST_ERR=/tmp/tmp.iRG3e8TGc9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.l2ytjGdxEk ++ cat /tmp/tmp.iRG3e8TGc9 Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.l2ytjGdxEk /tmp/tmp.iRG3e8TGc9 ++ return 0 + '[' '' ']' + desc 'test missing passwords were created and present in internal secrets' + set +o xtrace ----------------------------------------------------------------------------------- test missing passwords were created and present in internal secrets ----------------------------------------------------------------------------------- + empty_pwds=() + wrong_pwds=() + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking root' Checking root ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.PlvR3U6jOT +++ mktemp ++ local LAST_ERR=/tmp/tmp.pcqMV1X2Jr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PlvR3U6jOT ++ cat /tmp/tmp.pcqMV1X2Jr ++ rm /tmp/tmp.PlvR3U6jOT /tmp/tmp.pcqMV1X2Jr ++ return 0 + secret_pass='*M-xKEC=w#]-kwGz2' ++ getSecretData internal-some-name root ++ local secretName=internal-some-name ++ local dataKey=root ++ kubectl_bin get secrets/internal-some-name '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.e21QWAZSrO +++ mktemp ++ local LAST_ERR=/tmp/tmp.2rnG3eONJZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.e21QWAZSrO ++ cat /tmp/tmp.2rnG3eONJZ ++ rm /tmp/tmp.e21QWAZSrO /tmp/tmp.2rnG3eONJZ ++ return 0 + int_secret_pass='*M-xKEC=w#]-kwGz2' + [[ -z *M-xKEC=w#]-kwGz2 ]] + [[ *M-xKEC=w#]-kwGz2 != \*\M\-\x\K\E\C\=\w\#\]\-\k\w\G\z\2 ]] + [[ root != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ root ]] + [[ '' =~ root ]] + echo 'Running compare for root' Running compare for root + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''*M-xKEC=w#]-kwGz2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''*M-xKEC=w#]-kwGz2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW 
TABLES;' '-h some-name-proxysql -uroot -p'\''*M-xKEC=w#]-kwGz2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''*M-xKEC=w#]-kwGz2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OElFsrEYwN +++ mktemp ++ local LAST_ERR=/tmp/tmp.oPm9MMDRRX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OElFsrEYwN ++ cat /tmp/tmp.oPm9MMDRRX ++ rm /tmp/tmp.OElFsrEYwN /tmp/tmp.oPm9MMDRRX ++ return 0 + client_pod=pxc-client-857d976497-9m679 + wait_pod pxc-client-857d976497-9m679 + local pod=pxc-client-857d976497-9m679 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-9m679 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-9m679 condition met waiting for pod/pxc-client-857d976497-9m679 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.AxwkEOwxIb/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-4.sql /tmp/tmp.AxwkEOwxIb/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking xtrabackup' Checking xtrabackup ++ getSecretData my-cluster-secrets xtrabackup ++ local secretName=my-cluster-secrets ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.5L7zPoR1bo +++ mktemp ++ local LAST_ERR=/tmp/tmp.8Np94vjWUh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5L7zPoR1bo ++ cat /tmp/tmp.8Np94vjWUh ++ rm /tmp/tmp.5L7zPoR1bo /tmp/tmp.8Np94vjWUh ++ return 0 + secret_pass=',>q1A9!j<>ZZGuvt28' ++ getSecretData internal-some-name xtrabackup ++ local secretName=internal-some-name ++ local dataKey=xtrabackup ++ base64 --decode ++ kubectl_bin get secrets/internal-some-name '--template={{.data.xtrabackup}}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mza8f8wiTX +++ mktemp ++ local LAST_ERR=/tmp/tmp.1CexkAa9ef ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mza8f8wiTX ++ cat /tmp/tmp.1CexkAa9ef ++ rm /tmp/tmp.mza8f8wiTX /tmp/tmp.1CexkAa9ef ++ return 0 + int_secret_pass=',>q1A9!j<>ZZGuvt28' + [[ -z ,>q1A9!j<>ZZGuvt28 ]] + [[ ,>q1A9!j<>ZZGuvt28 != \,\>\q\1\A\9\!\j\<\>\Z\Z\G\u\v\t\2\8 ]] + [[ xtrabackup != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ xtrabackup ]] + [[ '' =~ xtrabackup ]] + echo 'Running compare for xtrabackup' Running compare for xtrabackup + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\'',>q1A9!j<>ZZGuvt28'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\'',>q1A9!j<>ZZGuvt28'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\'',>q1A9!j<>ZZGuvt28'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\'',>q1A9!j<>ZZGuvt28'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Hi92WdN7ha +++ mktemp ++ local LAST_ERR=/tmp/tmp.wVmTKnrKKd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Hi92WdN7ha ++ cat /tmp/tmp.wVmTKnrKKd ++ rm /tmp/tmp.Hi92WdN7ha /tmp/tmp.wVmTKnrKKd ++ return 0 + client_pod=pxc-client-857d976497-9m679 + wait_pod pxc-client-857d976497-9m679 + local pod=pxc-client-857d976497-9m679 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-9m679 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-9m679 condition met waiting for pod/pxc-client-857d976497-9m679 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.AxwkEOwxIb/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-4.sql /tmp/tmp.AxwkEOwxIb/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking monitor' Checking monitor ++ getSecretData my-cluster-secrets monitor ++ local secretName=my-cluster-secrets ++ local dataKey=monitor ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.QNvHDyqiFl +++ mktemp ++ local LAST_ERR=/tmp/tmp.nfFMPZb4zq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QNvHDyqiFl ++ cat /tmp/tmp.nfFMPZb4zq ++ rm /tmp/tmp.QNvHDyqiFl /tmp/tmp.nfFMPZb4zq ++ return 0 + secret_pass=monitor_password ++ getSecretData internal-some-name monitor ++ local secretName=internal-some-name ++ local dataKey=monitor ++ kubectl_bin get secrets/internal-some-name '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.kdMjtALKpv +++ mktemp ++ local LAST_ERR=/tmp/tmp.pHDKBr8sQ0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kdMjtALKpv ++ cat /tmp/tmp.pHDKBr8sQ0 ++ rm /tmp/tmp.kdMjtALKpv /tmp/tmp.pHDKBr8sQ0 ++ return 0 + int_secret_pass=monitor_password + [[ -z monitor_password ]] + [[ monitor_password != \m\o\n\i\t\o\r\_\p\a\s\s\w\o\r\d ]] + [[ monitor != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ monitor ]] + [[ '' =~ monitor ]] + echo 'Running compare for monitor' Running compare for monitor + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local postfix= 
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CqFwmUP2kJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.dxRwPUym1Q ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CqFwmUP2kJ ++ cat /tmp/tmp.dxRwPUym1Q ++ rm /tmp/tmp.CqFwmUP2kJ /tmp/tmp.dxRwPUym1Q ++ return 0 + client_pod=pxc-client-857d976497-9m679 + wait_pod pxc-client-857d976497-9m679 + local pod=pxc-client-857d976497-9m679 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-9m679 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-9m679 condition met waiting for pod/pxc-client-857d976497-9m679 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.AxwkEOwxIb/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-4.sql /tmp/tmp.AxwkEOwxIb/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking proxyadmin' Checking proxyadmin ++ getSecretData my-cluster-secrets proxyadmin ++ local secretName=my-cluster-secrets ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.j6u3HqYX2S +++ mktemp ++ local LAST_ERR=/tmp/tmp.EN1kdgnCma ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.j6u3HqYX2S ++ cat /tmp/tmp.EN1kdgnCma ++ rm /tmp/tmp.j6u3HqYX2S /tmp/tmp.EN1kdgnCma ++ return 0 + secret_pass='=lwWIvi{DuAlu_iLW' ++ getSecretData internal-some-name proxyadmin ++ local secretName=internal-some-name ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.a0yzXnaN2o +++ mktemp ++ local LAST_ERR=/tmp/tmp.IOudKQuIs2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.a0yzXnaN2o ++ cat /tmp/tmp.IOudKQuIs2 ++ rm /tmp/tmp.a0yzXnaN2o /tmp/tmp.IOudKQuIs2 ++ return 0 + int_secret_pass='=lwWIvi{DuAlu_iLW' + [[ -z =lwWIvi{DuAlu_iLW ]] + [[ =lwWIvi{DuAlu_iLW != \=\l\w\W\I\v\i\{\D\u\A\l\u\_\i\L\W ]] + [[ proxyadmin != \p\r\o\x\y\a\d\m\i\n ]] + [[ proxyadmin == \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ proxyadmin ]] + [[ '' =~ proxyadmin ]] + echo 'Running compare for proxyadmin' Running 
compare for proxyadmin + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''=lwWIvi{DuAlu_iLW'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''=lwWIvi{DuAlu_iLW'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''=lwWIvi{DuAlu_iLW'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''=lwWIvi{DuAlu_iLW'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.AxwkEOwxIb/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-2.sql /tmp/tmp.AxwkEOwxIb/select-2.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking operator' Checking operator ++ getSecretData my-cluster-secrets operator ++ local secretName=my-cluster-secrets ++ local dataKey=operator ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.lrBKYAGVv0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.J7YvZ128Lm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lrBKYAGVv0 ++ cat /tmp/tmp.J7YvZ128Lm ++ rm /tmp/tmp.lrBKYAGVv0 /tmp/tmp.J7YvZ128Lm ++ return 0 + secret_pass='#SqKC_(._-*O(I9Brq4' ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.JPDMp8HWQT +++ mktemp ++ local LAST_ERR=/tmp/tmp.7IQKvYEN0J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JPDMp8HWQT ++ cat /tmp/tmp.7IQKvYEN0J ++ rm /tmp/tmp.JPDMp8HWQT /tmp/tmp.7IQKvYEN0J ++ return 0 + int_secret_pass='#SqKC_(._-*O(I9Brq4' + [[ -z #SqKC_(._-*O(I9Brq4 ]] + [[ #SqKC_(._-*O(I9Brq4 != \#\S\q\K\C\_\(\.\_\-\*\O\(\I\9\B\r\q\4 ]] + [[ operator != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ operator ]] + [[ '' =~ operator ]] + echo 'Running compare for operator' Running compare for operator + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''#SqKC_(._-*O(I9Brq4'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''#SqKC_(._-*O(I9Brq4'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h 
some-name-proxysql -uoperator -p'\''#SqKC_(._-*O(I9Brq4'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''#SqKC_(._-*O(I9Brq4'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.W7KxJfW50Z +++ mktemp ++ local LAST_ERR=/tmp/tmp.38qkvXl0t0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.W7KxJfW50Z ++ cat /tmp/tmp.38qkvXl0t0 ++ rm /tmp/tmp.W7KxJfW50Z /tmp/tmp.38qkvXl0t0 ++ return 0 + client_pod=pxc-client-857d976497-9m679 + wait_pod pxc-client-857d976497-9m679 + local pod=pxc-client-857d976497-9m679 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-9m679 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-857d976497-9m679 condition met waiting for pod/pxc-client-857d976497-9m679 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.AxwkEOwxIb/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-4.sql /tmp/tmp.AxwkEOwxIb/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking replication' Checking replication ++ getSecretData my-cluster-secrets replication ++ local secretName=my-cluster-secrets ++ local dataKey=replication ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.Mi7CtQNBRr +++ mktemp ++ local LAST_ERR=/tmp/tmp.jbCd5gwv8Y ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Mi7CtQNBRr ++ cat /tmp/tmp.jbCd5gwv8Y ++ rm /tmp/tmp.Mi7CtQNBRr /tmp/tmp.jbCd5gwv8Y ++ return 0 + secret_pass='CDz*T*(q_Yl7KRLR' ++ getSecretData internal-some-name replication ++ local secretName=internal-some-name ++ local dataKey=replication ++ kubectl_bin get secrets/internal-some-name '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.vNxsAhccKT +++ mktemp ++ local LAST_ERR=/tmp/tmp.pYATtWfx40 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vNxsAhccKT ++ cat /tmp/tmp.pYATtWfx40 ++ rm /tmp/tmp.vNxsAhccKT /tmp/tmp.pYATtWfx40 ++ return 0 + int_secret_pass='CDz*T*(q_Yl7KRLR' + [[ -z CDz*T*(q_Yl7KRLR ]] + [[ CDz*T*(q_Yl7KRLR != \C\D\z\*\T\*\(\q\_\Y\l\7\K\R\L\R ]] + [[ replication != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ replication ]] + [[ '' =~ replication ]] + echo 'Running compare for replication' Running compare for replication + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''CDz*T*(q_Yl7KRLR'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''CDz*T*(q_Yl7KRLR'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''CDz*T*(q_Yl7KRLR'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''CDz*T*(q_Yl7KRLR'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sPCMgiR0MI +++ mktemp ++ local LAST_ERR=/tmp/tmp.DOOr1AyP5I ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sPCMgiR0MI ++ cat /tmp/tmp.DOOr1AyP5I ++ rm /tmp/tmp.sPCMgiR0MI /tmp/tmp.DOOr1AyP5I ++ return 0 + client_pod=pxc-client-857d976497-9m679 + wait_pod pxc-client-857d976497-9m679 + local pod=pxc-client-857d976497-9m679 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-9m679 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-9m679 condition met waiting for pod/pxc-client-857d976497-9m679 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.AxwkEOwxIb/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-4.sql /tmp/tmp.AxwkEOwxIb/select-4.sql + [[ -n '' ]] + [[ -n '' ]] + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ComSLw4vIk ++ mktemp + local LAST_ERR=/tmp/tmp.OqHNbyzdJg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ComSLw4vIk secret/my-cluster-secrets patched + cat /tmp/tmp.OqHNbyzdJg + rm /tmp/tmp.ComSLw4vIk /tmp/tmp.OqHNbyzdJg + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp 
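# [editor's note] The "test root" step above rotates the root password by
# patching the cluster Secret: dGVzdC1wYXNzd29yZA== is simply base64 for
# "test-password", and the operator reconciles the change after the patch.
# A minimal sketch of the patch_secret helper as reconstructed from this
# trace (the standalone function form and lack of error handling are
# assumptions):
patch_secret() {
    local secret="$1" key="$2" value="$3"   # value must already be base64
    kubectl patch secret "${secret}" -p="{\"data\":{\"${key}\": \"${value}\"}}"
}
# usage matching the log:
patch_secret my-cluster-secrets root "$(printf '%s' 'test-password' | base64)"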
++ local LAST_OUT=/tmp/tmp.7NP9H1E0TF +++ mktemp ++ local LAST_ERR=/tmp/tmp.9Pw6I5Ncjw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7NP9H1E0TF ++ cat /tmp/tmp.9Pw6I5Ncjw ++ rm /tmp/tmp.7NP9H1E0TF /tmp/tmp.9Pw6I5Ncjw ++ return 0 + client_pod=pxc-client-857d976497-9m679 + wait_pod pxc-client-857d976497-9m679 + local pod=pxc-client-857d976497-9m679 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-9m679 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-9m679 condition met waiting for pod/pxc-client-857d976497-9m679 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.AxwkEOwxIb/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-4.sql /tmp/tmp.AxwkEOwxIb/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.jY9FSFj8YX ++ mktemp + local LAST_ERR=/tmp/tmp.zjO039Hy4Q + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jY9FSFj8YX perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.zjO039Hy4Q + rm /tmp/tmp.jY9FSFj8YX /tmp/tmp.zjO039Hy4Q + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iAQO5ta53c +++ mktemp ++ local LAST_ERR=/tmp/tmp.K0qHJ22vJT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iAQO5ta53c ++ cat /tmp/tmp.K0qHJ22vJT ++ rm /tmp/tmp.iAQO5ta53c /tmp/tmp.K0qHJ22vJT ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XWVKeazgD6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.BogpMVcpYY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XWVKeazgD6 ++ cat /tmp/tmp.BogpMVcpYY ++ rm /tmp/tmp.XWVKeazgD6 /tmp/tmp.BogpMVcpYY ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp 
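# [editor's note] The proxyadmin test begins by resizing ProxySQL through the
# custom resource rather than by touching the StatefulSet directly, then lets
# the operator reconcile the pods. Note the explicit --type=merge: custom
# resources do not support kubectl's default strategic-merge patch. The step,
# with values copied from the trace:
kubectl patch pxc some-name --type=merge -p='{"spec":{"proxysql":{"size":3}}}'
sleep 15   # matches the trace; the consistency wait below does the real check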
+++++ local LAST_OUT=/tmp/tmp.ZuROZAQKPu ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ZiD9RaqggX +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ZuROZAQKPu +++++ cat /tmp/tmp.ZiD9RaqggX +++++ rm /tmp/tmp.ZuROZAQKPu /tmp/tmp.ZiD9RaqggX +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.xpDzPXN4P4 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.drl82MmX5N +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.xpDzPXN4P4 +++++ cat /tmp/tmp.drl82MmX5N +++++ rm /tmp/tmp.xpDzPXN4P4 /tmp/tmp.drl82MmX5N +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jufHm2NrqR +++ mktemp ++ local LAST_ERR=/tmp/tmp.exNIOZLd5G ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jufHm2NrqR ++ cat /tmp/tmp.exNIOZLd5G ++ rm /tmp/tmp.jufHm2NrqR /tmp/tmp.exNIOZLd5G ++ return 0 + [[ 3 == \3 ]] + echo + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.mTI7dp1j8q ++ mktemp + local LAST_ERR=/tmp/tmp.cLDyoFuRtF + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mTI7dp1j8q secret/my-cluster-secrets patched + cat /tmp/tmp.cLDyoFuRtF + rm /tmp/tmp.mTI7dp1j8q /tmp/tmp.cLDyoFuRtF + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PYJAUOWtIU +++ mktemp ++ local LAST_ERR=/tmp/tmp.LFcphzt2r7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PYJAUOWtIU ++ cat /tmp/tmp.LFcphzt2r7 ++ rm /tmp/tmp.PYJAUOWtIU /tmp/tmp.LFcphzt2r7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
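# [editor's note] wait_cluster_consistency, whose iterations fill the next
# stretch of this log, polls .status until the cluster reports ready and the
# replica counts match. A condensed sketch reconstructed from the trace (the
# 5-second interval and 300-iteration cap are from the log; the exact upstream
# implementation may differ):
wait_cluster_consistency() {
    local cluster="$1" cluster_size="$2" proxy_size="$3"
    local i=0 max=300
    echo -n "waiting for pxc/${cluster} to be ready"
    until [[ "$(kubectl get pxc "${cluster}" -o 'jsonpath={.status.state}')" == "ready" ]]; do
        echo -n .
        sleep 5
        [[ $((i++)) -ge ${max} ]] && return 1   # give up after roughly 25 minutes
    done
    # once ready, assert the reported sizes, exactly as the trace does
    [[ "$(kubectl get pxc "${cluster}" -o 'jsonpath={.status.pxc.ready}')" == "${cluster_size}" ]] &&
    [[ "$(kubectl get pxc "${cluster}" -o 'jsonpath={.status.proxysql.ready}')" == "${proxy_size}" ]]
}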
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HXeQ1cBIli +++ mktemp ++ local LAST_ERR=/tmp/tmp.TP6wwY3Dky ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HXeQ1cBIli ++ cat /tmp/tmp.TP6wwY3Dky ++ rm /tmp/tmp.HXeQ1cBIli /tmp/tmp.TP6wwY3Dky ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.99OclGBimD +++ mktemp ++ local LAST_ERR=/tmp/tmp.5bDW2LNI59 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.99OclGBimD ++ cat /tmp/tmp.5bDW2LNI59 ++ rm /tmp/tmp.99OclGBimD /tmp/tmp.5bDW2LNI59 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BFTdOu0wt6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.YYp0g4pne9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BFTdOu0wt6 ++ cat /tmp/tmp.YYp0g4pne9 ++ rm /tmp/tmp.BFTdOu0wt6 /tmp/tmp.YYp0g4pne9 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DYFfdngQQ6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.2qXlb2snnm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DYFfdngQQ6 ++ cat /tmp/tmp.2qXlb2snnm ++ rm /tmp/tmp.DYFfdngQQ6 /tmp/tmp.2qXlb2snnm ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.tOrjTQNlF2 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.bLy1gPqhrw +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.tOrjTQNlF2 +++++ cat /tmp/tmp.bLy1gPqhrw +++++ rm /tmp/tmp.tOrjTQNlF2 /tmp/tmp.bLy1gPqhrw +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.wWpZbr6AIL ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.eLX918tSIi +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.wWpZbr6AIL +++++ cat /tmp/tmp.eLX918tSIi +++++ rm /tmp/tmp.wWpZbr6AIL /tmp/tmp.eLX918tSIi +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local 
LAST_OUT=/tmp/tmp.OWLQNs30uN +++ mktemp ++ local LAST_ERR=/tmp/tmp.AtFrGqfeAz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OWLQNs30uN ++ cat /tmp/tmp.AtFrGqfeAz ++ rm /tmp/tmp.OWLQNs30uN /tmp/tmp.AtFrGqfeAz ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.AxwkEOwxIb/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-2.sql /tmp/tmp.AxwkEOwxIb/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.AxwkEOwxIb/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-2.sql /tmp/tmp.AxwkEOwxIb/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.AxwkEOwxIb/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-2.sql /tmp/tmp.AxwkEOwxIb/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.hXZehHgbj4 ++ mktemp + local LAST_ERR=/tmp/tmp.XvawG9b0hL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hXZehHgbj4 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.XvawG9b0hL + rm /tmp/tmp.hXZehHgbj4 /tmp/tmp.XvawG9b0hL + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.269XYh7q6C ++ mktemp + local LAST_ERR=/tmp/tmp.NIX9P9RnDO + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.269XYh7q6C secret/my-cluster-secrets patched + cat /tmp/tmp.NIX9P9RnDO + rm /tmp/tmp.269XYh7q6C /tmp/tmp.NIX9P9RnDO + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zid1WMpbUm +++ mktemp ++ local LAST_ERR=/tmp/tmp.bskMvBqoF2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zid1WMpbUm ++ cat /tmp/tmp.bskMvBqoF2 ++ rm /tmp/tmp.zid1WMpbUm /tmp/tmp.bskMvBqoF2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vyqyqMKGPW +++ mktemp ++ local LAST_ERR=/tmp/tmp.C3HyU4GNfH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vyqyqMKGPW ++ cat /tmp/tmp.C3HyU4GNfH ++ rm /tmp/tmp.vyqyqMKGPW /tmp/tmp.C3HyU4GNfH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
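# [editor's note] The select-2 comparisons above verify the new proxyadmin
# password on every ProxySQL pod individually by logging in to the admin
# interface on 127.0.0.1:6032 from inside each pod. A sketch of run_mysql_local
# as reconstructed from the trace (the exec/pipe form is an assumption):
run_mysql_local() {
    local command="$1" uri="$2" pod="$3" container="$4"
    kubectl exec "${pod}" -c "${container}" -- \
        bash -c "printf '%s\n' \"${command}\" | mysql -sN ${uri}"
}
# usage matching the log: proxysql admin port, one pod at a time
run_mysql_local 'SHOW TABLES;' "-h127.0.0.1 -P6032 -uproxyadmin -p'test-password'" \
    some-name-proxysql-0 proxysql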
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4MHd3Kzf2J +++ mktemp ++ local LAST_ERR=/tmp/tmp.U70yDxOm56 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4MHd3Kzf2J ++ cat /tmp/tmp.U70yDxOm56 ++ rm /tmp/tmp.4MHd3Kzf2J /tmp/tmp.U70yDxOm56 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tRz0l8tTlP +++ mktemp ++ local LAST_ERR=/tmp/tmp.8wPVaX2Tm2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tRz0l8tTlP ++ cat /tmp/tmp.8wPVaX2Tm2 ++ rm /tmp/tmp.tRz0l8tTlP /tmp/tmp.8wPVaX2Tm2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Lmrn9XrGEy +++ mktemp ++ local LAST_ERR=/tmp/tmp.0XAApzmIes ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Lmrn9XrGEy ++ cat /tmp/tmp.0XAApzmIes ++ rm /tmp/tmp.Lmrn9XrGEy /tmp/tmp.0XAApzmIes ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tepNHwy8b9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.zizi2O0L7n ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tepNHwy8b9 ++ cat /tmp/tmp.zizi2O0L7n ++ rm /tmp/tmp.tepNHwy8b9 /tmp/tmp.zizi2O0L7n ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5HSy5fD5N2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.feNbsXgJOx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5HSy5fD5N2 ++ cat /tmp/tmp.feNbsXgJOx ++ rm /tmp/tmp.5HSy5fD5N2 /tmp/tmp.feNbsXgJOx ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.h368i3lhvh +++ mktemp ++ local LAST_ERR=/tmp/tmp.G2PuD91pLn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.h368i3lhvh ++ cat /tmp/tmp.G2PuD91pLn ++ rm /tmp/tmp.h368i3lhvh /tmp/tmp.G2PuD91pLn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WoE2eZ2fXg +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ltn737qyne ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WoE2eZ2fXg ++ cat /tmp/tmp.Ltn737qyne ++ rm /tmp/tmp.WoE2eZ2fXg /tmp/tmp.Ltn737qyne ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EDGpeLosC3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.yU2R8RsoC3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EDGpeLosC3 ++ cat /tmp/tmp.yU2R8RsoC3 ++ rm /tmp/tmp.EDGpeLosC3 /tmp/tmp.yU2R8RsoC3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.y51qRuqetV +++ mktemp ++ local LAST_ERR=/tmp/tmp.fHdxe8653J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.y51qRuqetV ++ cat /tmp/tmp.fHdxe8653J ++ rm /tmp/tmp.y51qRuqetV /tmp/tmp.fHdxe8653J ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.G6wuA31YC8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.DboWtQxgsO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.G6wuA31YC8 ++ cat /tmp/tmp.DboWtQxgsO ++ rm /tmp/tmp.G6wuA31YC8 /tmp/tmp.DboWtQxgsO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mCfD38yM7V +++ mktemp ++ local LAST_ERR=/tmp/tmp.ntZopYR1h7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mCfD38yM7V ++ cat /tmp/tmp.ntZopYR1h7 ++ rm /tmp/tmp.mCfD38yM7V /tmp/tmp.ntZopYR1h7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qupaSX96VC +++ mktemp ++ local LAST_ERR=/tmp/tmp.7F8a4DIiZE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qupaSX96VC ++ cat /tmp/tmp.7F8a4DIiZE ++ rm /tmp/tmp.qupaSX96VC /tmp/tmp.7F8a4DIiZE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
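# [editor's note] Each consistency wait in this log (see the blocks above and
# the one that completes below) also re-resolves which proxy fronts the
# cluster by reading two spec flags. A condensed sketch of that
# get_proxy/get_proxy_engine logic, with the jsonpath expressions taken
# verbatim from the trace:
get_proxy_engine() {
    local cluster="$1"
    if [[ "$(kubectl get pxc "${cluster}" -o 'jsonpath={.spec.haproxy.enabled}')" == "true" ]]; then
        echo haproxy
    elif [[ "$(kubectl get pxc "${cluster}" -o 'jsonpath={.spec.proxysql.enabled}')" == "true" ]]; then
        echo proxysql    # the branch taken throughout this run
    fi
}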
.+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.utyLaViWy4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.NSfeWmNvFj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.utyLaViWy4 ++ cat /tmp/tmp.NSfeWmNvFj ++ rm /tmp/tmp.utyLaViWy4 /tmp/tmp.NSfeWmNvFj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MIyukCehgk +++ mktemp ++ local LAST_ERR=/tmp/tmp.PsbzKWYaZx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MIyukCehgk ++ cat /tmp/tmp.PsbzKWYaZx ++ rm /tmp/tmp.MIyukCehgk /tmp/tmp.PsbzKWYaZx ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oxw8f0UJmw +++ mktemp ++ local LAST_ERR=/tmp/tmp.UKX4XgGJhn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oxw8f0UJmw ++ cat /tmp/tmp.UKX4XgGJhn ++ rm /tmp/tmp.oxw8f0UJmw /tmp/tmp.UKX4XgGJhn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BUXYKID6lK +++ mktemp ++ local LAST_ERR=/tmp/tmp.AbhUEDg9Ka ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BUXYKID6lK ++ cat /tmp/tmp.AbhUEDg9Ka ++ rm /tmp/tmp.BUXYKID6lK /tmp/tmp.AbhUEDg9Ka ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8Or2WGeyb6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.h1cJ09tNO5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8Or2WGeyb6 ++ cat /tmp/tmp.h1cJ09tNO5 ++ rm /tmp/tmp.8Or2WGeyb6 /tmp/tmp.h1cJ09tNO5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ThHXruv1eV +++ mktemp ++ local LAST_ERR=/tmp/tmp.XLIODN8Fvq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ThHXruv1eV ++ cat /tmp/tmp.XLIODN8Fvq ++ rm /tmp/tmp.ThHXruv1eV /tmp/tmp.XLIODN8Fvq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e2EnrODe6c +++ mktemp ++ local LAST_ERR=/tmp/tmp.D0ZvvHQHC8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.e2EnrODe6c ++ cat /tmp/tmp.D0ZvvHQHC8 ++ rm /tmp/tmp.e2EnrODe6c /tmp/tmp.D0ZvvHQHC8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RSPPjConYj +++ mktemp ++ local LAST_ERR=/tmp/tmp.vCRruclS6E ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RSPPjConYj ++ cat /tmp/tmp.vCRruclS6E ++ rm /tmp/tmp.RSPPjConYj /tmp/tmp.vCRruclS6E ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RmPGFD07Qf +++ mktemp ++ local LAST_ERR=/tmp/tmp.y8ELzlpwjo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RmPGFD07Qf ++ cat /tmp/tmp.y8ELzlpwjo ++ rm /tmp/tmp.RmPGFD07Qf /tmp/tmp.y8ELzlpwjo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 22 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.g5oOr59YXy +++ mktemp ++ local LAST_ERR=/tmp/tmp.fiDiUpqRTu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.g5oOr59YXy ++ cat /tmp/tmp.fiDiUpqRTu ++ rm /tmp/tmp.g5oOr59YXy /tmp/tmp.fiDiUpqRTu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 23 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4RaNPEtAvU +++ mktemp ++ local LAST_ERR=/tmp/tmp.24oe4A7dck ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4RaNPEtAvU ++ cat /tmp/tmp.24oe4A7dck ++ rm /tmp/tmp.4RaNPEtAvU /tmp/tmp.24oe4A7dck ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 24 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CvSfrwv4Mn +++ mktemp ++ local LAST_ERR=/tmp/tmp.iQigzqgWte ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CvSfrwv4Mn ++ cat /tmp/tmp.iQigzqgWte ++ rm /tmp/tmp.CvSfrwv4Mn /tmp/tmp.iQigzqgWte ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
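# [editor's note] All of the remote SHOW TABLES / SHOW DATABASES probes in this
# log run through one shared client deployment rather than from the Jenkins
# host. A sketch of the get_client_pod/run_mysql pair reconstructed from the
# trace (the selector and jsonpath are verbatim; the exec/pipe form is an
# assumption):
get_client_pod() {
    kubectl get pods --selector=name=pxc-client \
        -o 'jsonpath={.items[].metadata.name}'
}
run_mysql() {
    local command="$1" uri="$2"
    local client_pod
    client_pod="$(get_client_pod)"
    kubectl exec "${client_pod}" -- \
        bash -c "printf '%s\n' \"${command}\" | mysql -sN ${uri}"
}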
.+ sleep 5 + [[ 25 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UFZlbkG9ba +++ mktemp ++ local LAST_ERR=/tmp/tmp.tkabIVTw7e ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UFZlbkG9ba ++ cat /tmp/tmp.tkabIVTw7e ++ rm /tmp/tmp.UFZlbkG9ba /tmp/tmp.tkabIVTw7e ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OOMXAMhoHi +++ mktemp ++ local LAST_ERR=/tmp/tmp.LIly8EjVgN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OOMXAMhoHi ++ cat /tmp/tmp.LIly8EjVgN ++ rm /tmp/tmp.OOMXAMhoHi /tmp/tmp.LIly8EjVgN ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.TfItINk1hW ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.4ZUPk19Syt +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.TfItINk1hW +++++ cat /tmp/tmp.4ZUPk19Syt +++++ rm /tmp/tmp.TfItINk1hW /tmp/tmp.4ZUPk19Syt +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.xFddPciTWI ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.2o8HA5IgiC +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.xFddPciTWI +++++ cat /tmp/tmp.2o8HA5IgiC +++++ rm /tmp/tmp.xFddPciTWI /tmp/tmp.2o8HA5IgiC +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.miTW1kaMRa +++ mktemp ++ local LAST_ERR=/tmp/tmp.R8Zn2NBTG3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.miTW1kaMRa ++ cat /tmp/tmp.R8Zn2NBTG3 ++ rm /tmp/tmp.miTW1kaMRa /tmp/tmp.R8Zn2NBTG3 ++ return 0 + [[ 2 == \2 ]] + echo + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 
'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' -s /tmp/tmp.AxwkEOwxIb/select-3.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-3.sql /tmp/tmp.AxwkEOwxIb/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.q0NEORPeIP ++ mktemp + local LAST_ERR=/tmp/tmp.45QbbNSpiT + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.q0NEORPeIP secret/my-cluster-secrets patched + cat /tmp/tmp.45QbbNSpiT + rm /tmp/tmp.q0NEORPeIP /tmp/tmp.45QbbNSpiT + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.BVsw0cq3Ex +++ mktemp ++ local LAST_ERR=/tmp/tmp.gcdGTkEaWa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BVsw0cq3Ex ++ cat /tmp/tmp.gcdGTkEaWa ++ rm /tmp/tmp.BVsw0cq3Ex /tmp/tmp.gcdGTkEaWa ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + echo 'Skipping dual password feature doesn'\''t work for 5.7. PXC 5.7 doesn'\''t support it!' Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it! + return + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fNwvOovR3S +++ mktemp ++ local LAST_ERR=/tmp/tmp.dZysbuqtOM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fNwvOovR3S ++ cat /tmp/tmp.dZysbuqtOM ++ rm /tmp/tmp.fNwvOovR3S /tmp/tmp.dZysbuqtOM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
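# [editor's note] wait_for_password_propagation returns immediately above
# because MySQL's dual-password mechanism (ALTER USER ... RETAIN CURRENT
# PASSWORD, available in 8.0+) does not exist in PXC 5.7, so there is no
# window in which both the old and new monitor passwords would work. The
# guard is just an image-tag match; a sketch (the IMAGE_PXC variable name is
# an assumption):
if [[ "${IMAGE_PXC}" =~ 5\.7 ]]; then
    echo "Skipping: PXC 5.7 does not support dual passwords"
    return 0   # inside the helper; the 8.0 path would poll with the old password
fi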
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.B5EpEdz4KY +++ mktemp ++ local LAST_ERR=/tmp/tmp.ir1y4dQSFT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.B5EpEdz4KY ++ cat /tmp/tmp.ir1y4dQSFT ++ rm /tmp/tmp.B5EpEdz4KY /tmp/tmp.ir1y4dQSFT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.USd3cZgzYV +++ mktemp ++ local LAST_ERR=/tmp/tmp.pteMHWyEsR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.USd3cZgzYV ++ cat /tmp/tmp.pteMHWyEsR ++ rm /tmp/tmp.USd3cZgzYV /tmp/tmp.pteMHWyEsR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Y8FXrp4K1i +++ mktemp ++ local LAST_ERR=/tmp/tmp.Gsjn02r0aV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Y8FXrp4K1i ++ cat /tmp/tmp.Gsjn02r0aV ++ rm /tmp/tmp.Y8FXrp4K1i /tmp/tmp.Gsjn02r0aV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Lp8Ybv8Kjy +++ mktemp ++ local LAST_ERR=/tmp/tmp.mXfjvjohE1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Lp8Ybv8Kjy ++ cat /tmp/tmp.mXfjvjohE1 ++ rm /tmp/tmp.Lp8Ybv8Kjy /tmp/tmp.mXfjvjohE1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wgft060L0d +++ mktemp ++ local LAST_ERR=/tmp/tmp.wejxlUQXFd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wgft060L0d ++ cat /tmp/tmp.wejxlUQXFd ++ rm /tmp/tmp.wgft060L0d /tmp/tmp.wejxlUQXFd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.m7YcUIINkm +++ mktemp ++ local LAST_ERR=/tmp/tmp.CI1YAdqnVh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.m7YcUIINkm ++ cat /tmp/tmp.CI1YAdqnVh ++ rm /tmp/tmp.m7YcUIINkm /tmp/tmp.CI1YAdqnVh ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
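# [editor's note] compare_mysql_cmd, used for the monitor check below, picks a
# version-specific expected file when one exists: the trace tests the image tag
# against 8.4, 8.0 and 5.7 and then probes for a select-4-57.sql variant before
# falling back to select-4.sql. A sketch of that selection (test_dir and
# tmp_dir are assumed variables; the file naming is from the trace):
expected_result="${test_dir}/compare/select-4.sql"
if [[ "${IMAGE_PXC}" =~ 5\.7 ]] && [[ -f "${expected_result%.sql}-57.sql" ]]; then
    expected_result="${expected_result%.sql}-57.sql"
fi
diff -u "${expected_result}" "${tmp_dir}/select-4.sql"   # any drift fails the test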
.+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rTDFmsHlzU +++ mktemp ++ local LAST_ERR=/tmp/tmp.jqCDYBuXHH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rTDFmsHlzU ++ cat /tmp/tmp.jqCDYBuXHH ++ rm /tmp/tmp.rTDFmsHlzU /tmp/tmp.jqCDYBuXHH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.z2TvO2euQC +++ mktemp ++ local LAST_ERR=/tmp/tmp.gsh8OKwV8e ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.z2TvO2euQC ++ cat /tmp/tmp.gsh8OKwV8e ++ rm /tmp/tmp.z2TvO2euQC /tmp/tmp.gsh8OKwV8e ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dTRHZGR2UX +++ mktemp ++ local LAST_ERR=/tmp/tmp.wDmLWOl8GR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dTRHZGR2UX ++ cat /tmp/tmp.wDmLWOl8GR ++ rm /tmp/tmp.dTRHZGR2UX /tmp/tmp.wDmLWOl8GR ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.rochCYTqMT ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.FjjLrnKREw +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.rochCYTqMT +++++ cat /tmp/tmp.FjjLrnKREw +++++ rm /tmp/tmp.rochCYTqMT /tmp/tmp.FjjLrnKREw +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.mQ1D8G3mlG ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.1XTtaHU6sP +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.mQ1D8G3mlG +++++ cat /tmp/tmp.1XTtaHU6sP +++++ rm /tmp/tmp.mQ1D8G3mlG /tmp/tmp.1XTtaHU6sP +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2HcsAkIZHT +++ mktemp ++ local LAST_ERR=/tmp/tmp.X5iI3l6FfJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2HcsAkIZHT ++ cat /tmp/tmp.X5iI3l6FfJ ++ rm /tmp/tmp.2HcsAkIZHT /tmp/tmp.X5iI3l6FfJ ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW 
TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BQgJxJgWYZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.tTUB8O0Zrd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BQgJxJgWYZ ++ cat /tmp/tmp.tTUB8O0Zrd ++ rm /tmp/tmp.BQgJxJgWYZ /tmp/tmp.tTUB8O0Zrd ++ return 0 + client_pod=pxc-client-857d976497-9m679 + wait_pod pxc-client-857d976497-9m679 + local pod=pxc-client-857d976497-9m679 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-9m679 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-9m679 condition met waiting for pod/pxc-client-857d976497-9m679 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.AxwkEOwxIb/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-4.sql /tmp/tmp.AxwkEOwxIb/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.bKjwIFZLw5 ++ mktemp + local LAST_ERR=/tmp/tmp.IIuoeWOwkM + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.bKjwIFZLw5 secret/my-cluster-secrets patched + cat /tmp/tmp.IIuoeWOwkM + rm /tmp/tmp.bKjwIFZLw5 /tmp/tmp.IIuoeWOwkM + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZjShVsxnDB +++ mktemp ++ local LAST_ERR=/tmp/tmp.KSZSo8VYh0 ++ local exit_status=0 +++ seq 0 2 ++ 
for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZjShVsxnDB ++ cat /tmp/tmp.KSZSo8VYh0 ++ rm /tmp/tmp.ZjShVsxnDB /tmp/tmp.KSZSo8VYh0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zajtR2Gced +++ mktemp ++ local LAST_ERR=/tmp/tmp.eq2jUEni4g ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zajtR2Gced ++ cat /tmp/tmp.eq2jUEni4g ++ rm /tmp/tmp.zajtR2Gced /tmp/tmp.eq2jUEni4g ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.roavXnn804 +++ mktemp ++ local LAST_ERR=/tmp/tmp.kqNPJ7hMPX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.roavXnn804 ++ cat /tmp/tmp.kqNPJ7hMPX ++ rm /tmp/tmp.roavXnn804 /tmp/tmp.kqNPJ7hMPX ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nFVqAzwyPn +++ mktemp ++ local LAST_ERR=/tmp/tmp.tyqh75apIl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nFVqAzwyPn ++ cat /tmp/tmp.tyqh75apIl ++ rm /tmp/tmp.nFVqAzwyPn /tmp/tmp.tyqh75apIl ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.GFEAG2us6m ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.kPNv95GLkz +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.GFEAG2us6m +++++ cat /tmp/tmp.kPNv95GLkz +++++ rm /tmp/tmp.GFEAG2us6m /tmp/tmp.kPNv95GLkz +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.uIRL1QX6vy ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.FYmquhCz2C +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.uIRL1QX6vy +++++ cat /tmp/tmp.FYmquhCz2C +++++ rm /tmp/tmp.uIRL1QX6vy /tmp/tmp.FYmquhCz2C +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aIpKXmihjW +++ mktemp ++ local LAST_ERR=/tmp/tmp.5OYtavg8cy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' 
+ sleep 10
+ compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\'''
+ run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\'''
+ client_pod=pxc-client-857d976497-9m679
pod/pxc-client-857d976497-9m679 condition met
.Ok
+ '[' '!' -s /tmp/tmp.AxwkEOwxIb/select-4.sql ']'
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-4.sql /tmp/tmp.AxwkEOwxIb/select-4.sql
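Every kubectl invocation in this trace runs through the kubectl_bin wrapper, which is what produces the recurring mktemp/LAST_OUT/LAST_ERR/seq 0 2 scaffolding. A minimal sketch reconstructed from that scaffolding; only the success path is exercised in this log, so the back-off between retries is an assumption:

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        [ $exit_status != 0 ] || break   # first success wins
        sleep 1                          # assumed back-off; not visible in the trace
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR" >&2
    rm "$LAST_OUT" "$LAST_ERR"
    return $exit_status
}

Because stdout and stderr are captured to the temp files and only printed once at the end, every call in the log is bracketed by the same mktemp/cat/rm lines.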
+ desc 'change secret name'
-----------------------------------------------------------------------------------
change secret name
-----------------------------------------------------------------------------------
+ kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
+ sleep 30
+ wait_cluster_consistency some-name 3 2
-----------------------------------------------------------------------------------
wait cluster consistency
-----------------------------------------------------------------------------------
waiting for pxc/some-name to be ready....................
[.status.state polled every 5 s; iterations 0-18 returned "initializing", iteration 19 returned "ready"]
+ [[ ready == \r\e\a\d\y ]]
+ [[ 3 == \3 ]]
+++ get_proxy_engine some-name
+++ echo proxysql
+ [[ 2 == \2 ]]
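The readiness wait that keeps repeating above follows one pattern throughout the suite. A minimal sketch of wait_cluster_consistency as it can be reconstructed from this trace (the proxy ready-counter field is hard-coded to proxysql here because that is what this run uses; the real helper selects it via get_proxy_engine):

wait_cluster_consistency() {
    local cluster_name=$1 cluster_size=$2 proxy_size=$3
    local i=0 max=300
    sleep 7
    echo -n "waiting for pxc/$cluster_name to be ready"
    until [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.state}') == ready ]]; do
        echo -n .
        sleep 5
        [[ $i -ge $max ]] && return 1   # give up after ~300 polls (~25 minutes)
        let i+=1
    done
    # once the CR reports ready, the member counters must match the expected sizes
    [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.pxc.ready}') == "$cluster_size" ]]
    [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.proxysql.ready}') == "$proxy_size" ]]
}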
+ desc 'test new operator'
-----------------------------------------------------------------------------------
test new operator
-----------------------------------------------------------------------------------
+ newpass=test-password2
++ echo -n test-password2
++ base64
+ newpassencrypted=dGVzdC1wYXNzd29yZDI=
+ patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI=
+ kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}'
secret/my-cluster-secrets-2 patched
+ sleep 15
+ wait_cluster_consistency some-name 3 2
-----------------------------------------------------------------------------------
wait cluster consistency
-----------------------------------------------------------------------------------
waiting for pxc/some-name to be ready...
[three polls returned "initializing", the fourth returned "ready"; pxc.ready=3, proxysql.ready=2]
+ sleep 20
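The password rotation above is two steps: base64-encode the new value, then patch the single key in the Secret's data map. A short sketch of patch_secret as shown by this trace:

patch_secret() {
    local secret=$1 key=$2 value=$3   # value is already base64-encoded
    kubectl patch secret "$secret" "-p={\"data\":{\"$key\": \"$value\"}}"
}

newpassencrypted=$(echo -n test-password2 | base64)   # dGVzdC1wYXNzd29yZDI=
patch_secret my-cluster-secrets-2 operator "$newpassencrypted"

The operator notices the Secret change and applies the new password to the mysql account, which the following SHOW TABLES login as -uoperator -p'test-password2' verifies end to end through ProxySQL.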
+ compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\'''
+ run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\'''
+ client_pod=pxc-client-857d976497-9m679
pod/pxc-client-857d976497-9m679 condition met
.Ok
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-4.sql /tmp/tmp.AxwkEOwxIb/select-4.sql
++ getSecretData my-cluster-secrets-2 root
++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}'
++ base64 --decode
+ newpass='J}LNI0v.j<+D7h6w'
+ desc 'test new users sync'
-----------------------------------------------------------------------------------
test new users sync
-----------------------------------------------------------------------------------
+ run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''J}LNI0v.j<+D7h6w'\'';' '-h some-name-pxc -uroot -p'\''J}LNI0v.j<+D7h6w'\'''
+ client_pod=pxc-client-857d976497-9m679
.Ok
+ sleep 40
+ compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''J}LNI0v.j<+D7h6w'\'''
+ run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''J}LNI0v.j<+D7h6w'\'''
+ client_pod=pxc-client-857d976497-9m679
.Ok
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-4.sql /tmp/tmp.AxwkEOwxIb/select-4.sql
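getSecretData is the inverse of patch_secret: it pulls one key out of a Secret and base64-decodes it, exactly as the trace shows:

getSecretData() {
    local secretName=$1 dataKey=$2
    kubectl get "secrets/$secretName" "--template={{.data.$dataKey}}" | base64 --decode
}

newpass=$(getSecretData my-cluster-secrets-2 root)   # operator-generated root password

The sync test then creates testsync directly against the PXC service (bypassing the proxy) and, after the 40 s sleep, logs in as testsync through some-name-proxysql, which only succeeds once the new account has been synced into ProxySQL.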
++ getSecretData internal-some-name operator
++ kubectl get secrets/internal-some-name '--template={{.data.operator}}'
++ base64 --decode
+ pass=test-password2
+ desc 'check secret without operator'
-----------------------------------------------------------------------------------
check secret without operator
-----------------------------------------------------------------------------------
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/conf/secrets.yml
secret/my-cluster-secrets-2 configured
Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
+ sleep 15
+ compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\'''
+ run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\'''
+ client_pod=pxc-client-857d976497-9m679
.Ok
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-4.sql /tmp/tmp.AxwkEOwxIb/select-4.sql
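The warning is expected here: my-cluster-secrets-2 was last touched by kubectl patch, so it lacks the last-applied-configuration annotation that kubectl apply diffs against, and kubectl patches the annotation in automatically, as the message says. If the noise mattered, the annotation could be seeded explicitly before re-applying (hypothetical usage, not part of this suite):

kubectl apply set-last-applied --create-annotation=true -f e2e-tests/users/conf/secrets.yml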
+ newpass=test-password2
++ echo -n test-password2
++ base64
+ newpassencrypted=dGVzdC1wYXNzd29yZDI=
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/conf/some-name.yml
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/conf/some-name.yml
[the CR is piped through a chain of /usr/bin/sed substitutions that pin apiVersion to pxc.percona.com/v1, point every image (pxc, init, backup, logcollector, proxysql, haproxy, pmm) at the build under test, set minio-service.users-16376 and apply: Never, then feed the result to kubectl apply -f -]
perconaxtradbcluster.pxc.percona.com/some-name configured
+ sleep 15
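The sed chain summarized above is worth seeing in full. A sketch of cat_config assembled from the substitutions visible in this trace (the expressions are verbatim from the log; the wrapping function structure and pipe order are assumptions, since the trace runs them as parallel pipeline stages):

cat_config() {
    cat "$1" \
        | /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
        | /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' \
        | /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' \
        | /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2260-a2c7ae05#' \
        | /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' \
        | /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' \
        | /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' \
        | /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' \
        | /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' \
        | /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-16376~ \
        | /usr/bin/sed -e 's#apply:.*#apply: Never#'
}
# usage, as in the trace:
cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/conf/some-name.yml | kubectl apply -f -

Pinning every image this way lets one CR template serve all PXC versions under test; only the sed targets change per job.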
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.v2RGd4uxun +++ mktemp ++ local LAST_ERR=/tmp/tmp.5rimZLLfYF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.v2RGd4uxun ++ cat /tmp/tmp.5rimZLLfYF ++ rm /tmp/tmp.v2RGd4uxun /tmp/tmp.5rimZLLfYF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Cay3zbBIjS +++ mktemp ++ local LAST_ERR=/tmp/tmp.65wunUryCK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Cay3zbBIjS ++ cat /tmp/tmp.65wunUryCK ++ rm /tmp/tmp.Cay3zbBIjS /tmp/tmp.65wunUryCK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.obrwpTpN9z +++ mktemp ++ local LAST_ERR=/tmp/tmp.Q5FwcNzFP2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.obrwpTpN9z ++ cat /tmp/tmp.Q5FwcNzFP2 ++ rm /tmp/tmp.obrwpTpN9z /tmp/tmp.Q5FwcNzFP2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KxTjymbTdz +++ mktemp ++ local LAST_ERR=/tmp/tmp.i3dXce5lyJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KxTjymbTdz ++ cat /tmp/tmp.i3dXce5lyJ ++ rm /tmp/tmp.KxTjymbTdz /tmp/tmp.i3dXce5lyJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BkWUc4Gbrc +++ mktemp ++ local LAST_ERR=/tmp/tmp.4i5aJNKCET ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BkWUc4Gbrc ++ cat /tmp/tmp.4i5aJNKCET ++ rm /tmp/tmp.BkWUc4Gbrc /tmp/tmp.4i5aJNKCET ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.H5NpWNqpW4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.tNxWaONYxr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.H5NpWNqpW4 ++ cat /tmp/tmp.tNxWaONYxr ++ rm /tmp/tmp.H5NpWNqpW4 /tmp/tmp.tNxWaONYxr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SeVs3pci8R +++ mktemp ++ local LAST_ERR=/tmp/tmp.2i7TwsBZKC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SeVs3pci8R ++ cat /tmp/tmp.2i7TwsBZKC ++ rm /tmp/tmp.SeVs3pci8R /tmp/tmp.2i7TwsBZKC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2pZCnogzT2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.mzMXMkt7m7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2pZCnogzT2 ++ cat /tmp/tmp.mzMXMkt7m7 ++ rm /tmp/tmp.2pZCnogzT2 /tmp/tmp.mzMXMkt7m7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FFtFQIg1JP +++ mktemp ++ local LAST_ERR=/tmp/tmp.rkDb6Y0wyV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FFtFQIg1JP ++ cat /tmp/tmp.rkDb6Y0wyV ++ rm /tmp/tmp.FFtFQIg1JP /tmp/tmp.rkDb6Y0wyV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UlvSdklGD8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.7kNnu3Qk8s ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UlvSdklGD8 ++ cat /tmp/tmp.7kNnu3Qk8s ++ rm /tmp/tmp.UlvSdklGD8 /tmp/tmp.7kNnu3Qk8s ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NLiFq7fVR3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.cU4DduK5kD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NLiFq7fVR3 ++ cat /tmp/tmp.cU4DduK5kD ++ rm /tmp/tmp.NLiFq7fVR3 /tmp/tmp.cU4DduK5kD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tZKwYumCdb +++ mktemp ++ local LAST_ERR=/tmp/tmp.nb3APnHtp2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tZKwYumCdb ++ cat /tmp/tmp.nb3APnHtp2 ++ rm /tmp/tmp.tZKwYumCdb /tmp/tmp.nb3APnHtp2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EfXbFiwmh7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Uo1GcG7Iwk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EfXbFiwmh7 ++ cat /tmp/tmp.Uo1GcG7Iwk ++ rm /tmp/tmp.EfXbFiwmh7 /tmp/tmp.Uo1GcG7Iwk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jkIWajg3el +++ mktemp ++ local LAST_ERR=/tmp/tmp.8NhDrE7Ewl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jkIWajg3el ++ cat /tmp/tmp.8NhDrE7Ewl ++ rm /tmp/tmp.jkIWajg3el /tmp/tmp.8NhDrE7Ewl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GplPsSnk9s +++ mktemp ++ local LAST_ERR=/tmp/tmp.olofq0D8tI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GplPsSnk9s ++ cat /tmp/tmp.olofq0D8tI ++ rm /tmp/tmp.GplPsSnk9s /tmp/tmp.olofq0D8tI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yURGgOKGc6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ukYFI35Tpb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yURGgOKGc6 ++ cat /tmp/tmp.ukYFI35Tpb ++ rm /tmp/tmp.yURGgOKGc6 /tmp/tmp.ukYFI35Tpb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oKzgLNT48L +++ mktemp ++ local LAST_ERR=/tmp/tmp.FTdFkKUamV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oKzgLNT48L ++ cat /tmp/tmp.FTdFkKUamV ++ rm /tmp/tmp.oKzgLNT48L /tmp/tmp.FTdFkKUamV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ShEFamjeT2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.kW5SMyxg19 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ShEFamjeT2 ++ cat /tmp/tmp.kW5SMyxg19 ++ rm /tmp/tmp.ShEFamjeT2 /tmp/tmp.kW5SMyxg19 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YHV9rsaq0m +++ mktemp ++ local LAST_ERR=/tmp/tmp.j6xk7zdn4c ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YHV9rsaq0m ++ cat /tmp/tmp.j6xk7zdn4c ++ rm /tmp/tmp.YHV9rsaq0m /tmp/tmp.j6xk7zdn4c ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.r2SCjcueUO +++ mktemp ++ local LAST_ERR=/tmp/tmp.f61TgmSAl3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.r2SCjcueUO ++ cat /tmp/tmp.f61TgmSAl3 ++ rm /tmp/tmp.r2SCjcueUO /tmp/tmp.f61TgmSAl3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KGlr0TjT8S +++ mktemp ++ local LAST_ERR=/tmp/tmp.lhTMgi3oMq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KGlr0TjT8S ++ cat /tmp/tmp.lhTMgi3oMq ++ rm /tmp/tmp.KGlr0TjT8S /tmp/tmp.lhTMgi3oMq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RDRMO4Sitr +++ mktemp ++ local LAST_ERR=/tmp/tmp.PTL4urU7VO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RDRMO4Sitr ++ cat /tmp/tmp.PTL4urU7VO ++ rm /tmp/tmp.RDRMO4Sitr /tmp/tmp.PTL4urU7VO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 22 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.njEZO3bACF +++ mktemp ++ local LAST_ERR=/tmp/tmp.RuQV4jXE6b ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.njEZO3bACF ++ cat /tmp/tmp.RuQV4jXE6b ++ rm /tmp/tmp.njEZO3bACF /tmp/tmp.RuQV4jXE6b ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 23 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wM3jilpE7Q +++ mktemp ++ local LAST_ERR=/tmp/tmp.QvAhFA1QOi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wM3jilpE7Q ++ cat /tmp/tmp.QvAhFA1QOi ++ rm /tmp/tmp.wM3jilpE7Q /tmp/tmp.QvAhFA1QOi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 24 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CUXdl5i5rC +++ mktemp ++ local LAST_ERR=/tmp/tmp.kVWaAn31tT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CUXdl5i5rC ++ cat /tmp/tmp.kVWaAn31tT ++ rm /tmp/tmp.CUXdl5i5rC /tmp/tmp.kVWaAn31tT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 25 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tMzmZ1f96b +++ mktemp ++ local LAST_ERR=/tmp/tmp.zXFADACyTR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tMzmZ1f96b ++ cat /tmp/tmp.zXFADACyTR ++ rm /tmp/tmp.tMzmZ1f96b /tmp/tmp.zXFADACyTR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 26 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QhFCJXM9vx +++ mktemp ++ local LAST_ERR=/tmp/tmp.z19GexFpYK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QhFCJXM9vx ++ cat /tmp/tmp.z19GexFpYK ++ rm /tmp/tmp.QhFCJXM9vx /tmp/tmp.z19GexFpYK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 27 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EzRZphZoZh +++ mktemp ++ local LAST_ERR=/tmp/tmp.oobtTlQBwr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EzRZphZoZh ++ cat /tmp/tmp.oobtTlQBwr ++ rm /tmp/tmp.EzRZphZoZh /tmp/tmp.oobtTlQBwr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 28 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mDpuBTrVdm +++ mktemp ++ local LAST_ERR=/tmp/tmp.BwdE8eQgM1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mDpuBTrVdm ++ cat /tmp/tmp.BwdE8eQgM1 ++ rm /tmp/tmp.mDpuBTrVdm /tmp/tmp.BwdE8eQgM1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 29 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Bv0pzXQC0y +++ mktemp ++ local LAST_ERR=/tmp/tmp.V2WxvvCGOv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Bv0pzXQC0y ++ cat /tmp/tmp.V2WxvvCGOv ++ rm /tmp/tmp.Bv0pzXQC0y /tmp/tmp.V2WxvvCGOv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 30 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . [... the same 5-second poll repeats for i=31..52: each iteration checks the 300-iteration cap, re-runs kubectl get pxc some-name -o 'jsonpath={.status.state}', sees "initializing", and prints another "." ...]
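The iterations above are all the harness's wait_cluster_consistency polling. A minimal standalone sketch of the same pattern, assuming kubectl already points at the cluster's namespace (the function name and timeout handling here are illustrative, not the harness's exact code):

# Poll .status.state of the PXC custom resource until it reports "ready",
# mirroring the "[[ i -ge 300 ]]" and "sleep 5" guards visible in the trace.
wait_cluster_ready() {
    local cluster="$1"
    local i=0 max=300 state
    echo -n "waiting for pxc/${cluster} to be ready"
    while true; do
        state=$(kubectl get pxc "$cluster" -o 'jsonpath={.status.state}')
        [[ "$state" == "ready" ]] && break
        echo -n .
        sleep 5
        i=$((i + 1))
        if [[ $i -ge $max ]]; then
            echo " timeout waiting for pxc/${cluster} (state=${state})" >&2
            return 1
        fi
    done
    echo
}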
.+ sleep 5 + [[ 53 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aBCn95UIHP +++ mktemp ++ local LAST_ERR=/tmp/tmp.hUEFperCIw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aBCn95UIHP ++ cat /tmp/tmp.hUEFperCIw ++ rm /tmp/tmp.aBCn95UIHP /tmp/tmp.hUEFperCIw ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.olzbfkd2x1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Z3fXy1kUCE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.olzbfkd2x1 ++ cat /tmp/tmp.Z3fXy1kUCE ++ rm /tmp/tmp.olzbfkd2x1 /tmp/tmp.Z3fXy1kUCE ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Z91ZwfJ43Z ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.6Hn4hef83H +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Z91ZwfJ43Z +++++ cat /tmp/tmp.6Hn4hef83H +++++ rm /tmp/tmp.Z91ZwfJ43Z /tmp/tmp.6Hn4hef83H +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.A9AXQ3cUeu +++ mktemp ++ local LAST_ERR=/tmp/tmp.MUs7FW5HVj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.A9AXQ3cUeu ++ cat /tmp/tmp.MUs7FW5HVj ++ rm /tmp/tmp.A9AXQ3cUeu /tmp/tmp.MUs7FW5HVj ++ return 0 + [[ 3 == \3 ]] + echo + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + check_generation 2 haproxy some-name + local generation=2 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jJtAl2ZVFH +++ mktemp ++ local LAST_ERR=/tmp/tmp.OCYsI8vXpv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jJtAl2ZVFH ++ cat /tmp/tmp.OCYsI8vXpv ++ rm /tmp/tmp.jJtAl2ZVFH /tmp/tmp.OCYsI8vXpv ++ return 0 + current_generation=2 + [[ 2 != \2 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.XjN0RE21g1 ++ mktemp + local LAST_ERR=/tmp/tmp.6AcsDdDTvl + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 
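The patch_secret call being traced here merge-patches a single key of the Kubernetes Secret with a base64-encoded value; the operator then picks up the change and rotates the user. A sketch of the same call, using the secret and key names from this log (dGVzdC1wYXNzd29yZDI= is simply base64 for test-password2):

# Merge-patch one key in a Secret; the value must already be base64,
# as with every field under a Secret's .data.
patch_secret() {
    local secret="$1" key="$2" value="$3"
    kubectl patch secret "$secret" -p="{\"data\":{\"${key}\": \"${value}\"}}"
}

# The encoded value used above:
printf 'test-password2' | base64    # -> dGVzdC1wYXNzd29yZDI=
patch_secret my-cluster-secrets monitor "$(printf 'test-password2' | base64)"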
+ set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XjN0RE21g1 secret/my-cluster-secrets patched + cat /tmp/tmp.6AcsDdDTvl + rm /tmp/tmp.XjN0RE21g1 /tmp/tmp.6AcsDdDTvl + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . [... the same 5-second poll repeats for i=0..7: each iteration re-runs kubectl get pxc some-name -o 'jsonpath={.status.state}', sees "initializing", and prints another "." ...]
.+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rBSjpN9M62 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Vhlt95vAxI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rBSjpN9M62 ++ cat /tmp/tmp.Vhlt95vAxI ++ rm /tmp/tmp.rBSjpN9M62 /tmp/tmp.Vhlt95vAxI ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IysLzYvRKj +++ mktemp ++ local LAST_ERR=/tmp/tmp.uTiqPKqS3A ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IysLzYvRKj ++ cat /tmp/tmp.uTiqPKqS3A ++ rm /tmp/tmp.IysLzYvRKj /tmp/tmp.uTiqPKqS3A ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.7hrMoCYLXE ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.IBpExPo6CY +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.7hrMoCYLXE +++++ cat /tmp/tmp.IBpExPo6CY +++++ rm /tmp/tmp.7hrMoCYLXE /tmp/tmp.IBpExPo6CY +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.j2X5iwnOuh +++ mktemp ++ local LAST_ERR=/tmp/tmp.cNjduOwatU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.j2X5iwnOuh ++ cat /tmp/tmp.cNjduOwatU ++ rm /tmp/tmp.j2X5iwnOuh /tmp/tmp.cNjduOwatU ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-3-57.sql ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YlHWYbQmN7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ksoatBQNsc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 
'!=' 0 ']' ++ break ++ cat /tmp/tmp.YlHWYbQmN7 ++ cat /tmp/tmp.ksoatBQNsc ++ rm /tmp/tmp.YlHWYbQmN7 /tmp/tmp.ksoatBQNsc ++ return 0 + client_pod=pxc-client-857d976497-9m679 + wait_pod pxc-client-857d976497-9m679 + local pod=pxc-client-857d976497-9m679 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-9m679 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-9m679 condition met waiting for pod/pxc-client-857d976497-9m679 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.AxwkEOwxIb/select-3.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2260/e2e-tests/users/compare/select-3.sql /tmp/tmp.AxwkEOwxIb/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + check_generation 3 haproxy some-name + local generation=3 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.v7wEzka4qH +++ mktemp ++ local LAST_ERR=/tmp/tmp.3AeAGiTmTV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.v7wEzka4qH ++ cat /tmp/tmp.3AeAGiTmTV ++ rm /tmp/tmp.v7wEzka4qH /tmp/tmp.3AeAGiTmTV ++ return 0 + current_generation=3 + [[ 3 != \3 ]] + destroy users-16376 + local namespace=users-16376 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' + grep -v 'get backup status: Job.batch' + sort -u + tee /tmp/tmp.AxwkEOwxIb/operator.log + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.Jbwm45O5YC +++ mktemp ++ local LAST_ERR=/tmp/tmp.SvQHEZqKKe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Jbwm45O5YC ++ cat /tmp/tmp.SvQHEZqKKe ++ rm /tmp/tmp.Jbwm45O5YC /tmp/tmp.SvQHEZqKKe ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-6594d54b57-26m6j ++ mktemp + local LAST_OUT=/tmp/tmp.rZbreo9uKo ++ mktemp + local LAST_ERR=/tmp/tmp.5j7DXjMwoj + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-6594d54b57-26m6j + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rZbreo9uKo + cat /tmp/tmp.5j7DXjMwoj + rm /tmp/tmp.rZbreo9uKo /tmp/tmp.5j7DXjMwoj + return 0 
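After the rotation the test confirms two things: the new credential actually works through HAProxy (the SHOW DATABASES output diffed against select-3.sql above) and the proxy StatefulSet was bumped to a new generation by the restart (2 -> 3 in this run). A sketch of both checks, with the cluster and selector names taken from this log; the exec target mirrors the harness's run_mysql pattern and is an assumption here:

# 1) Connect through HAProxy as 'monitor' with the rotated password,
#    from inside the pxc-client pod.
client_pod=$(kubectl get pods --selector=name=pxc-client \
    -o 'jsonpath={.items[].metadata.name}')
kubectl exec "$client_pod" -- \
    mysql -sN -h some-name-haproxy -umonitor -p'test-password2' \
    -e 'SHOW DATABASES;'

# 2) A password change that restarts the proxy should bump the
#    StatefulSet's observed generation.
kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}'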
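The destroy step that starts here snapshots the operator log for the test artifacts, filtering the noisy lines before the dump that follows. Roughly (a sketch: the label selector, grep filters, and sed expressions are copied from the trace; the output path is illustrative):

# Find the operator pod by its app label, then keep a de-noised, sorted,
# de-duplicated copy of its log alongside the other artifacts.
op_pod=$(kubectl get pods -n pxc-operator \
    --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator \
    -o 'jsonpath={.items[].metadata.name}')
kubectl logs -n pxc-operator "$op_pod" \
    | grep -v level=info \
    | grep -v 'the object has been modified' \
    | grep -v 'get backup status: Job.batch' \
    | /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
    | sort -u \
    | tee /tmp/operator.log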
2025-11-25T18:16:53.760Z INFO setup Manager starting up {"gitCommit": "a2c7ae0545b8c35298a46790481af5498f2e6c1d", "gitBranch": "PR-2260-a2c7ae05", "buildTime": "2025-11-25T16:01:37Z", "goVersion": "go1.25.4", "os": "linux", "arch": "amd64"} 2025-11-25T18:16:53.760Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.13-gke.1454000"} 2025-11-25T18:16:53.763Z INFO setup Registering Components. 2025-11-25T18:16:54.741Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-11-25T18:16:54.742Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-11-25T18:16:54.742Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-11-25T18:16:54.742Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-11-25T18:16:54.742Z INFO controller-runtime.metrics Starting metrics server 2025-11-25T18:16:54.742Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-11-25T18:16:54.742Z INFO controller-runtime.webhook Starting webhook server 2025-11-25T18:16:54.742Z INFO setup Starting the Cmd. 2025-11-25T18:16:54.742Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-11-25T18:16:54.842Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 2025-11-25T18:16:54.882Z DEBUG events percona-xtradb-cluster-operator-6594d54b57-26m6j_0935f51c-d238-4737-a245-58415392da7b became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"b63a5d19-090d-46c4-8101-d59f6897f88f","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1764094614874255009"}, "reason": "LeaderElection"} 2025-11-25T18:16:54.882Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.Secret"} 2025-11-25T18:16:54.882Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2025-11-25T18:16:54.883Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-11-25T18:16:54.883Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-11-25T18:16:54.883Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-11-25T18:16:54.983Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2025-11-25T18:16:54.983Z INFO Starting Controller {"controller": "pxc-controller"} 2025-11-25T18:16:54.983Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2025-11-25T18:16:54.983Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2025-11-25T18:16:54.983Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2025-11-25T18:16:54.983Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2025-11-25T18:17:30.301Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "740edd47-22d6-4d81-90f8-92636b9fe9bc", "version": "1.19.0"} 2025-11-25T18:17:30.567Z INFO User secrets updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", 
"reconcileID": "740edd47-22d6-4d81-90f8-92636b9fe9bc", "secrets": "my-cluster-secrets"} 2025-11-25T18:17:30.786Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "740edd47-22d6-4d81-90f8-92636b9fe9bc", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-11-25T18:17:30.805Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "740edd47-22d6-4d81-90f8-92636b9fe9bc", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-11-25T18:17:31.364Z ERROR Reconciler error {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "740edd47-22d6-4d81-90f8-92636b9fe9bc", "error": "failed to deploy proxysql: updatePod for proxysql: reconcile config: reconcile autotune config: create or update configmap: configmaps \"auto-some-name-pxc\" already exists", "errorVerbose": "configmaps \"auto-some-name-pxc\" already exists\ncreate or update configmap\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileAutotuneConfigMap\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:92\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:25\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nreconcile autotune 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:27\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nreconcile 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:54\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nupdatePod for proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:578\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nfailed to deploy 
proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:596\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-25T18:17:31.531Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "eaef4d31-8c04-48c5-9e99-5c25e7ed287c", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-25T18:17:31.578Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "eaef4d31-8c04-48c5-9e99-5c25e7ed287c", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-25T18:17:31.678Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "eaef4d31-8c04-48c5-9e99-5c25e7ed287c", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-25T18:17:31.726Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "eaef4d31-8c04-48c5-9e99-5c25e7ed287c", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-25T18:17:31.801Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "eaef4d31-8c04-48c5-9e99-5c25e7ed287c", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-25T18:17:31.946Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "eaef4d31-8c04-48c5-9e99-5c25e7ed287c", "object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-25T18:17:33.477Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "6c9f5b2e-d480-4546-9959-2c69b1237b51", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-11-25T18:17:33.970Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "6c9f5b2e-d480-4546-9959-2c69b1237b51", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-11-25T18:18:45.153Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-16376", "name": 
"some-name", "reconcileID": "b004e28d-100b-4dca-84db-0e5bedc0d5f7", "user": "operator"} 2025-11-25T18:18:45.193Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "b004e28d-100b-4dca-84db-0e5bedc0d5f7", "user": "monitor"} 2025-11-25T18:18:45.238Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "b004e28d-100b-4dca-84db-0e5bedc0d5f7"} 2025-11-25T18:18:45.285Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "b004e28d-100b-4dca-84db-0e5bedc0d5f7", "user": "xtrabackup"} 2025-11-25T18:18:45.341Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "b004e28d-100b-4dca-84db-0e5bedc0d5f7"} 2025-11-25T18:18:45.350Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "b004e28d-100b-4dca-84db-0e5bedc0d5f7", "err": "get primary pxc pod: not found"} 2025-11-25T18:18:50.109Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "0d10f7c9-f39f-4a68-bc90-e64506cc50f6", "err": "get primary pxc pod: not found"} 2025-11-25T18:18:55.301Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "4934a946-ad27-449e-89a5-a9a6df009873", "err": "get primary pxc pod: not found"} 2025-11-25T18:19:00.471Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "98cf826b-ff19-4c40-be03-29332ab5a370", "err": "get primary pxc pod: not found"} 2025-11-25T18:21:17.635Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "5e719175-42ea-4459-ab3a-3132e013bfc3", "user": "root"} 2025-11-25T18:21:17.686Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "5e719175-42ea-4459-ab3a-3132e013bfc3", "user": "replication"} 2025-11-25T18:21:17.736Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "5e719175-42ea-4459-ab3a-3132e013bfc3", "new version": "5.7.44-48-57"} 2025-11-25T18:21:19.679Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "5e719175-42ea-4459-ab3a-3132e013bfc3"} 2025-11-25T18:21:24.307Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "c91fccdc-59b9-423b-a8bb-1e2a55653f9c"} 2025-11-25T18:21:29.575Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "0957d025-58df-4794-85f2-d9f739ef1094"} 2025-11-25T18:21:35.665Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "9a19f133-1d8b-4ff8-81f2-8c0102c9faf4"} 2025-11-25T18:21:40.988Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "23ac0a19-c6d6-409e-8ad1-cca8a260c6a8"} 
2025-11-25T18:21:46.287Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "24977f27-5259-494c-aee5-2f8355933311"} 2025-11-25T18:21:51.497Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "4a046034-f72f-4540-a87d-27ec93823a80"} 2025-11-25T18:21:56.778Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e8823846-4a06-44d0-854d-2cd2be383ee7"} 2025-11-25T18:22:02.090Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e046e121-fce3-4f56-a5db-2fca0e3d4487"} 2025-11-25T18:22:07.498Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "dd7d87c9-2425-4769-8eea-7b933f8997e3"} 2025-11-25T18:22:12.715Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "7af79551-d600-4597-b654-f3c677104de6"} 2025-11-25T18:22:18.279Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "5ad40304-117c-4594-b103-6ed26302c366"} 2025-11-25T18:22:23.189Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "d27f2210-72e4-4f89-a64c-5d064ca1e2f1"} 2025-11-25T18:22:28.389Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "afa52d56-f14e-4937-a1ad-f59893d2b362"} 2025-11-25T18:22:34.383Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "17b37715-1c31-43e4-bc7d-0ea6519cfe3c"} 2025-11-25T18:22:35.198Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "811c464a-ccdc-4e55-afe8-edf771545091", "user": "root"} 2025-11-25T18:22:35.227Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "811c464a-ccdc-4e55-afe8-edf771545091", "user": "root"} 2025-11-25T18:22:35.244Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "811c464a-ccdc-4e55-afe8-edf771545091", "secret": "some-name-mysql-init", "user": "root"} 2025-11-25T18:22:37.815Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "811c464a-ccdc-4e55-afe8-edf771545091"} 2025-11-25T18:22:37.839Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "811c464a-ccdc-4e55-afe8-edf771545091", "user": "root"} 2025-11-25T18:22:39.492Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "811c464a-ccdc-4e55-afe8-edf771545091"} 2025-11-25T18:22:44.874Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "0e66b1fc-1b01-4d67-9671-55c547bf5de6"} 2025-11-25T18:22:50.261Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", 
"namespace": "users-16376", "name": "some-name", "reconcileID": "e3cb7851-76d7-4640-a14e-675079fe6faf"} 2025-11-25T18:22:53.805Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "d529ba89-e272-4a65-9ad5-634970616fb4", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-25T18:22:53.862Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "d529ba89-e272-4a65-9ad5-634970616fb4", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-25T18:22:56.878Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "d529ba89-e272-4a65-9ad5-634970616fb4", "error": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:974\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-25T18:23:19.266Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "97e64ff0-0ade-4315-b538-75e2e5ba9b18", "user": "proxyadmin"} 2025-11-25T18:23:19.266Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "97e64ff0-0ade-4315-b538-75e2e5ba9b18", "user": "proxyadmin"} 2025-11-25T18:23:19.323Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "97e64ff0-0ade-4315-b538-75e2e5ba9b18", "user": "proxyadmin"} 2025-11-25T18:23:19.348Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "97e64ff0-0ade-4315-b538-75e2e5ba9b18", "user": "proxyadmin"} 2025-11-25T18:23:19.348Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "97e64ff0-0ade-4315-b538-75e2e5ba9b18", "last-applied-secret": "67177007c63c5dc8f925ec343158b3023ae106fd56533a3706e74c1e78bccd7f"} 2025-11-25T18:23:19.352Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "97e64ff0-0ade-4315-b538-75e2e5ba9b18", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-25T18:23:20.684Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "88fda2c2-974a-403d-8f65-513f7655a0ac", "error": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-2\" not found / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: 
YES)\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:509) : Galera hostgroup retrieval failed. \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-2\" not found / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:509) : Galera hostgroup retrieval failed. \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:974\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-25T18:23:38.452Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "733f9088-04ce-4ef4-9bcb-18df531f440d", "err": "get primary pxc pod: not found"} 2025-11-25T18:23:43.795Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "54d17df7-66d4-4df4-b96f-a3bacfc8875d", "err": "get primary pxc pod: not found"} 2025-11-25T18:23:49.531Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "599a2f06-599e-4da9-865f-f02643d63c0c", "err": "get primary pxc pod: not found"} 2025-11-25T18:23:55.140Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "6879a97e-4122-4834-87c0-ed70203c3898", "err": "get primary pxc pod: not found"} 2025-11-25T18:23:59.307Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "26339e8a-5987-4535-a298-bfc01faf95da"} 2025-11-25T18:24:03.409Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "9ee799fc-23ff-439c-b7cc-85e424593e22"} 2025-11-25T18:24:06.127Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "68905e51-0439-478f-af96-ad140db9f911", "user": "xtrabackup"} 2025-11-25T18:24:06.147Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "68905e51-0439-478f-af96-ad140db9f911", "user": "xtrabackup"} 2025-11-25T18:24:06.174Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", 
"reconcileID": "68905e51-0439-478f-af96-ad140db9f911", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-11-25T18:24:06.199Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "68905e51-0439-478f-af96-ad140db9f911", "user": "xtrabackup"} 2025-11-25T18:24:06.199Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "68905e51-0439-478f-af96-ad140db9f911", "last-applied-secret": "528a9a402270ecbebfad1ad21b6d4bcf7e5eae360eb863916ace10087b2cffd8"} 2025-11-25T18:24:06.200Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "68905e51-0439-478f-af96-ad140db9f911", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-25T18:24:06.264Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "68905e51-0439-478f-af96-ad140db9f911", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-25T18:24:09.148Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "68905e51-0439-478f-af96-ad140db9f911"} 2025-11-25T18:25:55.349Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "f4914d8f-4fd9-4715-855c-0b708cc49142", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-16376 on 34.118.224.10:53: no such host"} 2025-11-25T18:26:00.609Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "44c5e9b1-4a32-4b5a-a7c6-a9884165dc3b", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-16376 on 34.118.224.10:53: no such host"} 2025-11-25T18:26:05.799Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "8dca1056-97f4-41c9-9a58-0dfb5701994f", "primary name": "some-name-pxc-0.some-name-pxc.users-16376.svc.cluster.local"} 2025-11-25T18:26:10.942Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "8de5d3a6-f8e6-41bc-b993-dc961828b6ab", "primary name": "some-name-pxc-0.some-name-pxc.users-16376.svc.cluster.local"} 2025-11-25T18:26:16.128Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "ae8dd779-902f-4d06-a5a9-ddbdb3f86dd9", "primary name": "some-name-pxc-0.some-name-pxc.users-16376.svc.cluster.local"} 2025-11-25T18:26:21.324Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e94d4801-550c-4ff0-ab49-1212a0ad80af", "primary name": "some-name-pxc-0.some-name-pxc.users-16376.svc.cluster.local"} 2025-11-25T18:26:26.490Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "62614144-0230-4dc6-abec-64eb7bcd33ae", "primary name": "some-name-pxc-0.some-name-pxc.users-16376.svc.cluster.local"}
2025-11-25T18:26:31.726Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "84847359-746c-4168-a96d-145edd81d283", "primary name": "some-name-pxc-0.some-name-pxc.users-16376.svc.cluster.local"}
2025-11-25T18:26:36.882Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "cccfce5d-bcec-43c9-8aa9-762c7736cf69", "primary name": "some-name-pxc-0.some-name-pxc.users-16376.svc.cluster.local"}
2025-11-25T18:26:42.085Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "61f37240-3571-406f-a2c9-732256d9b244", "primary name": "some-name-pxc-0.some-name-pxc.users-16376.svc.cluster.local"}
2025-11-25T18:26:50.031Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "113b8e9b-f9ae-4506-b157-e7c926dda848"}
2025-11-25T18:26:52.088Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "3e90ccae-7fcd-4680-88c2-c6a877a3655d", "user": "monitor"}
2025-11-25T18:26:52.109Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "3e90ccae-7fcd-4680-88c2-c6a877a3655d", "user": "monitor"}
2025-11-25T18:26:52.167Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "3e90ccae-7fcd-4680-88c2-c6a877a3655d", "secret": "some-name-mysql-init", "user": "monitor"}
2025-11-25T18:26:52.199Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "3e90ccae-7fcd-4680-88c2-c6a877a3655d", "user": "monitor"}
2025-11-25T18:26:52.232Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "3e90ccae-7fcd-4680-88c2-c6a877a3655d", "user": "monitor"}
2025-11-25T18:26:52.232Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "3e90ccae-7fcd-4680-88c2-c6a877a3655d", "last-applied-secret": "63806d91dd2b2e294bf28b2e76a83ab868e853c354438b3ac563ac635f7e73ab"}
2025-11-25T18:26:52.239Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "3e90ccae-7fcd-4680-88c2-c6a877a3655d", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-25T18:27:42.781Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "9cdcc0f0-9968-4a8f-98c0-1f5d77e20c39"}
2025-11-25T18:27:47.782Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "a817a4f0-f3cc-4331-9170-a1d3c3106b05"}
2025-11-25T18:27:53.484Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "508d7117-decd-42c2-8fa6-8dadbe3a7319"}
2025-11-25T18:27:59.103Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "0711107e-b9d4-4ee1-8598-a4e920805a39"}
2025-11-25T18:27:59.160Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e94d08bb-5e24-4a08-a84e-d09513d74a05", "user": "operator"}
2025-11-25T18:27:59.179Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e94d08bb-5e24-4a08-a84e-d09513d74a05", "user": "operator"}
2025-11-25T18:27:59.203Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e94d08bb-5e24-4a08-a84e-d09513d74a05", "secret": "some-name-mysql-init", "user": "operator"}
2025-11-25T18:27:59.222Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e94d08bb-5e24-4a08-a84e-d09513d74a05", "user": "operator"}
2025-11-25T18:27:59.222Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e94d08bb-5e24-4a08-a84e-d09513d74a05", "last-applied-secret": "9d1beb732e0b5ec0925c801708ff78260f456806c61ca13b0a4d3db6cf2983f3"}
2025-11-25T18:27:59.226Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e94d08bb-5e24-4a08-a84e-d09513d74a05", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-25T18:28:02.960Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e94d08bb-5e24-4a08-a84e-d09513d74a05", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16376.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:974\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-25T18:28:33.208Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "7265344f-b5d1-4e2e-a025-4df8b9666389"}
2025-11-25T18:28:37.249Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "34c8ecac-601a-491d-b8a3-5ab13317ad82"}
2025-11-25T18:28:42.759Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "0faefe15-b91e-4478-ac2b-a1b7033e0f47"}
2025-11-25T18:28:48.049Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "acc85273-4607-46cf-9046-5c38b494898b"}
2025-11-25T18:28:48.570Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "secrets": "my-cluster-secrets-2"}
2025-11-25T18:28:48.570Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "user": "root"}
2025-11-25T18:28:48.599Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "user": "root"}
2025-11-25T18:28:48.644Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "secret": "some-name-mysql-init", "user": "root"}
2025-11-25T18:28:51.205Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145"}
2025-11-25T18:28:51.226Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "user": "root"}
2025-11-25T18:28:51.226Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "user": "operator"}
2025-11-25T18:28:51.245Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "user": "operator"}
2025-11-25T18:28:51.261Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "secret": "some-name-mysql-init", "user": "operator"}
2025-11-25T18:28:51.282Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "user": "operator"}
2025-11-25T18:28:51.282Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "user": "monitor"}
2025-11-25T18:28:51.299Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "user": "monitor"}
2025-11-25T18:28:51.316Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "secret": "some-name-mysql-init", "user": "monitor"}
2025-11-25T18:28:51.347Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "user": "monitor"}
2025-11-25T18:28:51.375Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "user": "monitor"}
2025-11-25T18:28:51.375Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "user": "xtrabackup"}
2025-11-25T18:28:51.393Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "user": "xtrabackup"}
2025-11-25T18:28:51.414Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2025-11-25T18:28:51.431Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "user": "xtrabackup"}
2025-11-25T18:28:51.431Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "user": "replication"}
2025-11-25T18:28:51.449Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "user": "replication"}
2025-11-25T18:28:51.469Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "secret": "some-name-mysql-init", "user": "replication"}
2025-11-25T18:28:51.490Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "user": "replication"}
2025-11-25T18:28:51.490Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "user": "proxyadmin"}
2025-11-25T18:28:51.521Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "user": "proxyadmin"}
2025-11-25T18:28:51.540Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "user": "proxyadmin"}
2025-11-25T18:28:51.540Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "last-applied-secret": "01cedd75fda1a6ad458ea39c0839bad561023165fb31eea81dced689dc2958b5"}
2025-11-25T18:28:51.540Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "last-applied-secret": "01cedd75fda1a6ad458ea39c0839bad561023165fb31eea81dced689dc2958b5"}
2025-11-25T18:28:51.543Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-25T18:28:51.591Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-25T18:28:53.906Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "e6a8cc7e-604c-4634-9716-a68d6dd4c145", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:974\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-25T18:31:06.012Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "098e517f-025e-4166-bd02-a15c01c8f8d2", "primary name": "some-name-pxc-0.some-name-pxc.users-16376.svc.cluster.local"}
2025-11-25T18:31:13.819Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "d658c9c4-46bd-404a-bba5-9eaa193a2ea5"}
2025-11-25T18:31:17.760Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "f3319772-3491-4e64-a023-cb3aa883bc3d", "user": "operator"}
2025-11-25T18:31:17.781Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "f3319772-3491-4e64-a023-cb3aa883bc3d", "user": "operator"}
2025-11-25T18:31:17.801Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "f3319772-3491-4e64-a023-cb3aa883bc3d", "secret": "some-name-mysql-init", "user": "operator"}
2025-11-25T18:31:17.820Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "f3319772-3491-4e64-a023-cb3aa883bc3d", "user": "operator"}
2025-11-25T18:31:17.820Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "f3319772-3491-4e64-a023-cb3aa883bc3d", "last-applied-secret": "9e114d19b824f777a9df4e929c38746fbb4040f5060f69987331f697adbbab46"}
2025-11-25T18:31:17.824Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "f3319772-3491-4e64-a023-cb3aa883bc3d", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-25T18:31:20.452Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "13ab4d44-8868-4c18-aefb-3474b5ee451e", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:974\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-25T18:31:57.645Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "a38e2ec1-5281-4610-95c6-e6535ef9ad42"}
2025-11-25T18:32:01.961Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "3b74f425-bb51-4c5d-9256-28eeb6f82b90"}
2025-11-25T18:32:06.966Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "37c36523-f8ac-4e82-9b8b-89fa8cd00461"}
2025-11-25T18:32:12.265Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "cbca469b-f5ec-40b8-9cd7-fb8c9754abce"}
2025-11-25T18:32:17.657Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "ff5707bf-fbfc-4323-bdba-3cbabca7a0a6"}
2025-11-25T18:32:23.069Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "f0fdefdd-f5b9-4d51-9fc7-2e69df6c384b"}
2025-11-25T18:32:28.743Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "4a9b604f-e579-408c-b4db-4793a1616d4e"}
2025-11-25T18:32:33.594Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "1605784a-48b2-43cb-bae3-d6575d9a5a4e"}
2025-11-25T18:32:38.833Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "6d002eae-819c-4d58-a3c6-d195ec460cba"}
2025-11-25T18:32:43.978Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "c48fd5fd-4b40-441f-b9a0-6772ddcbec6b"}
2025-11-25T18:32:49.579Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "bca8dd63-028b-4567-b4b3-4991d558d20d"}
2025-11-25T18:32:54.266Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "fb784760-c1ab-407c-ab6c-f141245e64c2"}
2025-11-25T18:32:59.772Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "bd32d1e0-50b1-41b6-88a7-c2dda0aee3cc"}
2025-11-25T18:33:05.753Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "29e9668d-b606-4d5e-bf39-aa8edec1bd24"}
2025-11-25T18:33:11.182Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "2983d718-af61-4891-82fd-b0fd5ab6a397"}
2025-11-25T18:33:11.313Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "1494f204-0927-471f-ad08-a46848281cef", "user": "root"}
2025-11-25T18:33:11.342Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "1494f204-0927-471f-ad08-a46848281cef", "user": "root"}
2025-11-25T18:33:11.365Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "1494f204-0927-471f-ad08-a46848281cef", "secret": "some-name-mysql-init", "user": "root"}
2025-11-25T18:33:13.868Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "1494f204-0927-471f-ad08-a46848281cef"}
2025-11-25T18:33:13.891Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "1494f204-0927-471f-ad08-a46848281cef", "user": "root"}
2025-11-25T18:33:13.891Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "1494f204-0927-471f-ad08-a46848281cef", "user": "monitor"}
2025-11-25T18:33:13.909Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "1494f204-0927-471f-ad08-a46848281cef", "user": "monitor"}
2025-11-25T18:33:13.932Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "1494f204-0927-471f-ad08-a46848281cef", "secret": "some-name-mysql-init", "user": "monitor"}
2025-11-25T18:33:13.963Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "1494f204-0927-471f-ad08-a46848281cef", "user": "monitor"}
2025-11-25T18:33:13.984Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "1494f204-0927-471f-ad08-a46848281cef", "user": "monitor"}
2025-11-25T18:33:13.984Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "1494f204-0927-471f-ad08-a46848281cef", "user": "xtrabackup"}
2025-11-25T18:33:14.001Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "1494f204-0927-471f-ad08-a46848281cef", "user": "xtrabackup"}
2025-11-25T18:33:14.018Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "1494f204-0927-471f-ad08-a46848281cef", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2025-11-25T18:33:14.040Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "1494f204-0927-471f-ad08-a46848281cef", "user": "xtrabackup"}
2025-11-25T18:33:14.040Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "1494f204-0927-471f-ad08-a46848281cef", "user": "proxyadmin"}
2025-11-25T18:33:14.072Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "1494f204-0927-471f-ad08-a46848281cef", "user": "proxyadmin"}
2025-11-25T18:33:14.089Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "1494f204-0927-471f-ad08-a46848281cef", "user": "proxyadmin"}
2025-11-25T18:33:14.090Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "1494f204-0927-471f-ad08-a46848281cef", "last-applied-secret": "16ec17f54a33500281b7e2ead5b4cb6e40fa556412e9cb9b77fe63fc8bce4567"}
2025-11-25T18:33:14.090Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "1494f204-0927-471f-ad08-a46848281cef", "last-applied-secret": "16ec17f54a33500281b7e2ead5b4cb6e40fa556412e9cb9b77fe63fc8bce4567"}
2025-11-25T18:33:14.092Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "1494f204-0927-471f-ad08-a46848281cef", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-25T18:33:14.144Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "1494f204-0927-471f-ad08-a46848281cef", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-25T18:33:16.258Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "1494f204-0927-471f-ad08-a46848281cef", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:974\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-25T18:33:39.477Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "f4aeda64-b860-41e8-8166-928a3772c807", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-25T18:33:39.530Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "f4aeda64-b860-41e8-8166-928a3772c807", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"}
2025-11-25T18:33:39.571Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "f4aeda64-b860-41e8-8166-928a3772c807", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"}
2025-11-25T18:33:39.710Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "f4aeda64-b860-41e8-8166-928a3772c807", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
2025-11-25T18:33:39.831Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "f4aeda64-b860-41e8-8166-928a3772c807", "object": "some-name-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
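Note on the "sync users" errors above: syncusers is the operator exec'ing the proxysql-admin tooling inside the proxysql container, so during a password rotation (or while the proxysql container is being replaced) the probe transiently fails with ERROR 1045 or "container not found" and is retried on the next resync. A rough manual replay of the same probe against this cluster, as a sketch only (pod, namespace, and container names are taken from the log above; the exact proxysql-admin invocation baked into the image may differ):

  kubectl -n users-16376 exec some-name-proxysql-0 -c proxysql -- \
    proxysql-admin --syncusers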
2025-11-25T18:33:40.812Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "78d02437-d71d-4e2c-8054-62e319c1625e", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"}
2025-11-25T18:36:28.421Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "61a24a49-ee1b-4454-be7d-372e9beccda7", "user": "root"}
2025-11-25T18:36:28.448Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "61a24a49-ee1b-4454-be7d-372e9beccda7", "user": "root"}
2025-11-25T18:36:28.467Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "61a24a49-ee1b-4454-be7d-372e9beccda7", "secret": "some-name-mysql-init", "user": "root"}
2025-11-25T18:36:28.490Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "61a24a49-ee1b-4454-be7d-372e9beccda7", "user": "root"}
2025-11-25T18:36:28.490Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "61a24a49-ee1b-4454-be7d-372e9beccda7", "user": "operator"}
2025-11-25T18:36:28.508Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "61a24a49-ee1b-4454-be7d-372e9beccda7", "user": "operator"}
2025-11-25T18:36:28.526Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "61a24a49-ee1b-4454-be7d-372e9beccda7", "secret": "some-name-mysql-init", "user": "operator"}
2025-11-25T18:36:28.545Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "61a24a49-ee1b-4454-be7d-372e9beccda7", "user": "operator"}
2025-11-25T18:36:28.545Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "61a24a49-ee1b-4454-be7d-372e9beccda7", "user": "monitor"}
2025-11-25T18:36:28.561Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "61a24a49-ee1b-4454-be7d-372e9beccda7", "user": "monitor"}
2025-11-25T18:36:28.580Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "61a24a49-ee1b-4454-be7d-372e9beccda7", "secret": "some-name-mysql-init", "user": "monitor"}
2025-11-25T18:36:28.599Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "61a24a49-ee1b-4454-be7d-372e9beccda7", "user": "monitor"}
2025-11-25T18:36:28.599Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "61a24a49-ee1b-4454-be7d-372e9beccda7", "user": "xtrabackup"}
2025-11-25T18:36:28.614Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "61a24a49-ee1b-4454-be7d-372e9beccda7", "user": "xtrabackup"}
2025-11-25T18:36:28.632Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "61a24a49-ee1b-4454-be7d-372e9beccda7", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2025-11-25T18:36:28.650Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "61a24a49-ee1b-4454-be7d-372e9beccda7", "user": "xtrabackup"}
2025-11-25T18:36:28.650Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "61a24a49-ee1b-4454-be7d-372e9beccda7", "user": "replication"}
2025-11-25T18:36:28.669Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "61a24a49-ee1b-4454-be7d-372e9beccda7", "user": "replication"}
2025-11-25T18:36:28.690Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "61a24a49-ee1b-4454-be7d-372e9beccda7", "secret": "some-name-mysql-init", "user": "replication"}
2025-11-25T18:36:28.712Z INFO HAProxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "61a24a49-ee1b-4454-be7d-372e9beccda7", "last-applied-secret": "9d1beb732e0b5ec0925c801708ff78260f456806c61ca13b0a4d3db6cf2983f3"}
2025-11-25T18:36:28.712Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "61a24a49-ee1b-4454-be7d-372e9beccda7", "user": "replication"}
2025-11-25T18:36:28.712Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "61a24a49-ee1b-4454-be7d-372e9beccda7", "last-applied-secret": "9d1beb732e0b5ec0925c801708ff78260f456806c61ca13b0a4d3db6cf2983f3"}
2025-11-25T18:36:28.715Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "61a24a49-ee1b-4454-be7d-372e9beccda7", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-25T18:36:28.759Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "61a24a49-ee1b-4454-be7d-372e9beccda7", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-25T18:38:55.366Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "6a2dc870-e99f-4d12-9f2a-4b40d107b256", "user": "monitor"}
2025-11-25T18:38:55.383Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "6a2dc870-e99f-4d12-9f2a-4b40d107b256", "user": "monitor"}
2025-11-25T18:38:55.405Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "6a2dc870-e99f-4d12-9f2a-4b40d107b256", "secret": "some-name-mysql-init", "user": "monitor"}
2025-11-25T18:38:55.429Z INFO HAProxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "6a2dc870-e99f-4d12-9f2a-4b40d107b256", "last-applied-secret": "91f02c6654d6c380d86672e6344c12fa4d37a5606147de9c5771a51a36b2fbed"}
2025-11-25T18:38:55.429Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "6a2dc870-e99f-4d12-9f2a-4b40d107b256", "user": "monitor"}
2025-11-25T18:38:55.434Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-16376", "name": "some-name", "reconcileID": "6a2dc870-e99f-4d12-9f2a-4b40d107b256", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:474
/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:869
[mysql] 2025/11/25 18:38:26 packets.go:58 unexpected EOF
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1
-  },
-  {
-  },
-  {
-  },
-  },
+  },
-  "01cedd75fda1a6ad458ea39c0839bad561023165fb31eea81dced689dc2958b5",
+  "01cedd75fda1a6ad458ea39c0839bad561023165fb31eea81dced689dc2958b5",
-  "16ec17f54a33500281b7e2ead5b4cb6e40fa556412e9cb9b77fe63fc8bce4567",
+  "16ec17f54a33500281b7e2ead5b4cb6e40fa556412e9cb9b77fe63fc8bce4567",
+  "1f02c6654d6c380d86672e6344c12fa4d37a5606147de9c5771a51a36b2fbed",
+  "3806d91dd2b2e294bf28b2e76a83ab868e853c354438b3ac563ac635f7e73ab",
-  "528a9a402270ecbebfad1ad21b6d4bcf7e5eae360eb863916ace10087b2cffd8",
-  "63806d91dd2b2e294bf28b2e76a83ab868e853c354438b3ac563ac635f7e73ab",
-  "7177007c63c5dc8f925ec343158b3023ae106fd56533a3706e74c1e78bccd7f",
-  "9d1beb732e0b5ec0925c801708ff78260f456806c61ca13b0a4d3db6cf2983f3",
+  "9d1beb732e0b5ec0925c801708ff78260f456806c61ca13b0a4d3db6cf2983f3",
-  "9e114d19b824f777a9df4e929c38746fbb4040f5060f69987331f697adbbab46",
+  "9e114d19b824f777a9df4e929c38746fbb4040f5060f69987331f697adbbab46",
-  Annotations: map[string]string{
+  Annotations: map[string]string{
+  APIVersion: "",
-  APIVersion: "apps/v1",
-  APIVersion: "apps/v1",
-  APIVersion: "v1",
-  Args: []string{"logrotate"},
+  AvailableReplicas: 0,
-  AvailableReplicas: 2,
-  AvailableReplicas: 3,
-  CollisionCount: &0,
+  CollisionCount: nil,
+  CreationTimestamp: v1.Time{},
-  CreationTimestamp: v1.Time{Time: s"2025-11-25 18:17:31 +0000 UTC"},
-  CreationTimestamp: v1.Time{Time: s"2025-11-25 18:33:39 +0000 UTC"},
+  CurrentReplicas: 0,
-  CurrentReplicas: 2,
-  CurrentReplicas: 3,
+  CurrentRevision: "",
-  CurrentRevision: "some-name-haproxy-5f6dc6dc58",
-  CurrentRevision: "some-name-haproxy-6bc895644f",
-  CurrentRevision: "some-name-proxysql-5fc9b579c4",
-  CurrentRevision: "some-name-proxysql-675d6c6c8d",
-  CurrentRevision: "some-name-proxysql-6c9f6c8dc7",
-  CurrentRevision: "some-name-proxysql-7999b56bf8",
-  CurrentRevision: "some-name-proxysql-9bf6946cf",
-  CurrentRevision: "some-name-proxysql-9f897cff",
-  CurrentRevision: "some-name-pxc-5b878d8c49",
-  CurrentRevision: "some-name-pxc-5cfcc95ccf",
-  CurrentRevision: "some-name-pxc-797c9ff4d6",
-  CurrentRevision: "some-name-pxc-7d859b6c85",
-  "d1beb732e0b5ec0925c801708ff78260f456806c61ca13b0a4d3db6cf2983f3",
-  DefaultMode: &420,
-  DefaultMode: &420,
+  DefaultMode: nil,
+  DefaultMode: nil,
+  DeprecatedServiceAccount: "",
-  DeprecatedServiceAccount: "default",
+  DNSPolicy: "",
-  DNSPolicy: "ClusterFirst",
-  EnvFrom: []v1.EnvFromSource{{SecretRef: s"&SecretEnvSource{LocalObjectRefe"...}},
-  Env: []v1.EnvVar{
-  FieldsType: "FieldsV1",
-  FieldsType: "FieldsV1",
-  FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`...,
-  FieldsV1: s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`...,
+  Generation: 0,
-  Generation: 1,
-  Generation: 2,
-  Generation: 3,
-  Generation: 4,
-  Generation: 5,
-  Generation: 6,
-  Generation: 7,
-  Generation: 8,
-  Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector",
-  Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector",
-  ImagePullPolicy: "Always",
+  "last-applied-secret": "528a9a402270ecbebfad1ad21b6d4bcf7e5eae360eb863916ace10087b2cffd8",
+  "last-applied-secret": "67177007c63c5dc8f925ec343158b3023ae106fd56533a3706e74c1e78bccd7f",
+  "last-applied-secret": "9d1beb732e0b5ec0925c801708ff78260f456806c61ca13b0a4d3db6cf2983f3",
+  ManagedFields: nil,
-  ManagedFields: []v1.ManagedFieldsEntry{
-  Manager: "kube-controller-manager",
-  Manager: "percona-xtradb-cluster-operator",
-  {Name: "IS_LOGCOLLECTOR", Value: "yes"},
-  {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"},
-  Name: "logrotate",
-  Name: "logs",
-  {Name: "MONITOR_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...},
-  {Name: "POD_NAMESPASE", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...},
-  {Name: "POD_NAME", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...},
-  {Name: "SERVICE_TYPE", Value: "mysql"},
+  ObservedGeneration: 0,
-  ObservedGeneration: 1,
-  ObservedGeneration: 2,
-  ObservedGeneration: 3,
-  ObservedGeneration: 4,
-  ObservedGeneration: 5,
-  ObservedGeneration: 6,
-  ObservedGeneration: 7,
-  ObservedGeneration: 8,
-  Operation: "Update",
-  Operation: "Update",
[ sorted diff residue follows and continues below: paired -/+ "percona.com/last-config-hash" annotation values for the some-name-pxc, some-name-proxysql, and some-name-haproxy StatefulSets; each value is base64-encoded pod-template JSON and is truncated with "..." in the source log ]
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMTZlYzE3ZjU0YTMzNTAwMjgxYjdlMmVhZDViNGNiNmU0MGZhNTU2NDEyZTljYjliNzdmZTYzZmM4YmNlNDU2NyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMTZlYzE3ZjU0YTMzNTAwMjgxYjdlMmVhZDViNGNiNmU0MGZhNTU2NDEyZTljYjliNzdmZTYzZmM4YmNlNDU2NyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMTZlYzE3ZjU0YTMzNTAwMjgxYjdlMmVhZDViNGNiNmU0MGZhNTU2NDEyZTljYjliNzdmZTYzZmM4YmNlNDU2NyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMjYwLWEyYzdhZTA1IiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJsb2dzIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1sb2djb2xsZWN0b3IiLCJlbnZGcm9tIjpbeyJzZWNyZXRSZWYiOnsibmFtZSI6InNvbWUtbmFtZS1sb2ctY29sbGVjdG9yIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IkxPR19EQVRBX0RJUiIsInZhbHVlIjoiL3Zhci9saWIvbXlzcWwifSx7Im5hbWUiOiJQT0RfTkFNRVNQQVNFIiwidmFsdWVGcm9tIjp7ImZpZWxkUmVmIjp7ImZpZWxkUGF0aCI6Im1ldGFkYXRhLm5hbWVzcGFjZSJ9fX0seyJuYW1lIjoiUE9EX05BTUUiLCJ2YWx1ZUZyb20iOnsiZmllbGRSZWYiOnsiZmllbGRQYXRoIjoibWV0YWRhdGEubmFtZSJ9fX1dLCJyZXNvdXJjZXMiOnt9LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJkYXRhZGlyIiwibW91bnRQYXRoIjoiL3Zhci9saWIvbXlzcWwifV0sImltYWdlUHVsbFBvbGljeSI6IkFsd2F5cyJ9LHsibmFtZSI6ImxvZ3JvdGF0ZSIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOm1haW4tbG9nY29sbGVjdG9yIiwiYXJncyI6WyJsb2dyb3RhdGUiXSwiZW52IjpbeyJuYW1lIjoiU0VSVklDRV9UWVBFIiwidmFsdWUiOiJteXNxbCJ9LHsibmFtZSI6Ik1PTklUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJtb25pdG9yIn19fV0sInJlc291cmNlcyI6e30sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImRhdGFkaXIiLCJtb3VudF
BhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHhjIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1weGM1LjciLCJjb21tYW5kIjpbIi92YXIvbGliL215c3FsL3B4Yy1lbnRyeXBvaW50LnNoIl0sImFyZ3MiOlsibXlzcWxkIl0sInBvcnRzIjpbeyJuYW1lIjoibXlzcWwiLCJjb250YWluZXJQb3J0IjozMzA2fSx7Im5hbWUiOiJzc3QiLCJjb250YWluZXJQb3J0Ijo0NDQ0fSx7Im5hbWUiOiJ3cml0ZS1zZXQiLCJjb250YWluZXJQb3J0Ijo0NTY3fSx7Im5hbWUiOiJpc3QiLCJjb250YWluZXJQb3J0Ijo0NTY4fSx7Im5hbWUiOiJteXNxbC1hZG1pbiIsImNvbnRhaW5lclBvcnQiOjMzMDYyfSx7Im5h"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMTZlYzE3ZjU0YTMzNTAwMjgxYjdlMmVhZDViNGNiNmU0MGZhNTU2NDEyZTljYjliNzdmZTYzZmM4YmNlNDU2NyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMjYwLWEyYzdhZTA1IiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXB4YzUuNyIsImNvbW1hbmQiOlsiL3Zhci9saWIvbXlzcWwvcHhjLWVudHJ5cG9pbnQuc2giXSwiYXJncyI6WyJteXNxbGQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InNzdCIsImNvbnRhaW5lclBvcnQiOjQ0NDR9LHsibmFtZSI6IndyaXRlLXNldCIsImNvbnRhaW5lclBvcnQiOjQ1Njd9LHsibmFtZSI6ImlzdCIsImNvbnRhaW5lclBvcnQiOjQ1Njh9LHsibmFtZSI6Im15c3FsLWFkbWluIiwiY29udGFpbmVyUG9ydCI6MzMwNjJ9LHsibmFtZSI6Im15c3FseCIsImN
vbnRhaW5lclBvcnQiOjMzMDYwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXB4YyIsIm9wdGlvbmFsIjp0cnVlfX1dLCJlbnYiOlt7Im5hbWUiOiJQWENfU0VSVklDRSIsInZhbHVlIjoic29tZS1uYW1lLXB4Yy11bnJlYWR5In0seyJuYW1lIjoiTU9OSVRPUl9IT1NUIiwidmFsdWUiOiIlIn0seyJuYW1lIjoiTVlTUUxfUk9PVF9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6InJvb3QifX19LHsibmFtZSI6IlhUUkFCQUNLVVBfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJ4dHJhYmFja3VwIn19fSx7Im5hbWUiOiJNT05JVE9SX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoibW9uaXRvciJ9fX0seyJuYW1lIjoiQ0xVU1RFUl9IQVNIIiwidmFsdWUiOiIzODU2MDMwIn0seyJuYW1lIjoiT1BFUkFUT1JfQURNSU5fUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiTElWRU5FU1NfQ0hFQ0tfVElNRU9VVCIsInZhbHVlIjoiNSJ9LHsibmFtZSI6IlJFQURJTkVTU19DSEVDS19USU1FT1VUIiwidmFsdWUiOiIxNSJ9LHsibmFtZSI6IkRFRkFVTFRfQVVUSEVOVElDQVRJT05fUExVR0lOIiwidmFsdWUiOiJjYWNoaW5nX3NoYTJfcGFzc3dvcmQifSx7Im5hbWUiOiJNWVNR"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNTI4YTlhNDAyMjcwZWNiZWJmYWQxYWQyMWI2ZDRiY2Y3ZTVlYWUzNjBlYjg2MzkxNmFjZTEwMDg3YjJjZmZkOCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNTI4YTlhNDAyMjcwZWNiZWJmYWQxYWQyMWI2ZDRiY2Y3ZTVlYWUzNjBlYjg2MzkxNmFjZTEwMDg3YjJjZmZkOCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOWQxYmViNzMyZTBiNWVjMDkyNWM4MDE3MDhmZjc4MjYwZjQ1NjgwNmM2MWNhMTNiMGE0ZDNkYjZjZjI5ODNmMyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNv"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNjcxNzcwMDdjNjNjNWRjOGY5MjVlYzM0MzE1OGIzMDIzYWUxMDZmZDU2NTMzYTM3MDZlNzRjMWU3OGJjY2Q3ZiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMw"..., +  PeriodSeconds: 0, -  PeriodSeconds: 10, +  PersistentVolumeClaimRetentionPolicy: nil, -  PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", +  Phase: "", -  Phase: "Pending", +  PodManagementPolicy: "", -  PodManagementPolicy: "OrderedReady", +  Protocol: "", -  Protocol: "TCP", +  ReadyReplicas: 0, -  ReadyReplicas: 2, -  ReadyReplicas: 3, +  Replicas: 0, -  Replicas: 2, -  Replicas: &2, +  Replicas: &2, -  Replicas: 3, -  Replicas: &3, +  Replicas: &3, +  ResourceVersion: "", -  ResourceVersion: "1764094685661183011", -  ResourceVersion: "1764094872384991024", -  ResourceVersion: "1764094992516335011", -  ResourceVersion: "1764095032744319011", -  ResourceVersion: "1764095047640303011", -  ResourceVersion: "1764095205214879024", -  ResourceVersion: "1764095245064623011", -  ResourceVersion: "1764095306252383011", -  ResourceVersion: "1764095356278559011", -  ResourceVersion: "1764095470800351024", -  ResourceVersion: "1764095499502223011", -  ResourceVersion: "1764095609542927024", -  ResourceVersion: "1764095682163055014", -  ResourceVersion: "1764095787150495024", -  ResourceVersion: "1764095857191327014", +  RestartPolicy: "", -  RestartPolicy: "Always", -  RevisionHistoryLimit: &10, +  RevisionHistoryLimit: nil, +  SchedulerName: "", +  SchedulerName: "", -  SchedulerName: "default-scheduler", -  SchedulerName: "default-scheduler", +  
SecurityContext: nil, -  SecurityContext: s"&PodSecurityContext{SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,SupplementalGroups:[],FSGroup:nil,RunAsGroup:nil,Sysctls:[]Sysctl{},WindowsOptions:nil,FSGroupChangePolicy:nil,SeccompProfile:nil,AppArmorProfile:nil,SupplementalGroupsPolicy:nil,SELinux"..., -  Subresource: "status", +  TerminationMessagePath: "", -  TerminationMessagePath: "/dev/termination-log", +  TerminationMessagePolicy: "", -  TerminationMessagePolicy: "File", -  Time: s"2025-11-25 18:17:31 +0000 UTC", -  Time: s"2025-11-25 18:18:05 +0000 UTC", -  Time: s"2025-11-25 18:21:12 +0000 UTC", -  Time: s"2025-11-25 18:22:53 +0000 UTC", -  Time: s"2025-11-25 18:23:12 +0000 UTC", -  Time: s"2025-11-25 18:23:19 +0000 UTC", -  Time: s"2025-11-25 18:23:52 +0000 UTC", -  Time: s"2025-11-25 18:24:06 +0000 UTC", -  Time: s"2025-11-25 18:24:07 +0000 UTC", -  Time: s"2025-11-25 18:26:45 +0000 UTC", -  Time: s"2025-11-25 18:26:52 +0000 UTC", -  Time: s"2025-11-25 18:27:25 +0000 UTC", -  Time: s"2025-11-25 18:27:59 +0000 UTC", -  Time: s"2025-11-25 18:28:26 +0000 UTC", -  Time: s"2025-11-25 18:28:51 +0000 UTC", -  Time: s"2025-11-25 18:29:16 +0000 UTC", -  Time: s"2025-11-25 18:31:10 +0000 UTC", -  Time: s"2025-11-25 18:31:17 +0000 UTC", -  Time: s"2025-11-25 18:31:39 +0000 UTC", -  Time: s"2025-11-25 18:33:14 +0000 UTC", -  Time: s"2025-11-25 18:33:29 +0000 UTC", -  Time: s"2025-11-25 18:33:39 +0000 UTC", -  Time: s"2025-11-25 18:34:42 +0000 UTC", -  Time: s"2025-11-25 18:36:27 +0000 UTC", -  Time: s"2025-11-25 18:36:28 +0000 UTC", -  Time: s"2025-11-25 18:37:37 +0000 UTC", -  TopologySpreadConstraints: nil, +  TopologySpreadConstraints: []v1.TopologySpreadConstraint{}, +  UID: "", -  UID: "2382760b-3d20-4bf6-94fc-bebf02f3dad1", -  UID: "33f87bd6-fb62-497a-9f8e-065037187f34", -  UID: "99991ad2-a7d6-4e76-b8fc-96ba387847aa", +  UpdatedReplicas: 0, -  UpdatedReplicas: 1, -  UpdatedReplicas: 2, -  UpdatedReplicas: 3, +  UpdateRevision: "", -  UpdateRevision: "some-name-haproxy-5f6dc6dc58", -  UpdateRevision: "some-name-haproxy-6bc895644f", -  UpdateRevision: "some-name-proxysql-5fc9b579c4", -  UpdateRevision: "some-name-proxysql-675d6c6c8d", -  UpdateRevision: "some-name-proxysql-6c9f6c8dc7", -  UpdateRevision: "some-name-proxysql-7999b56bf8", -  UpdateRevision: "some-name-proxysql-9bf6946cf", -  UpdateRevision: "some-name-proxysql-9f897cff", -  UpdateRevision: "some-name-pxc-5b878d8c49", -  UpdateRevision: "some-name-pxc-5cfcc95ccf", -  UpdateRevision: "some-name-pxc-797c9ff4d6", -  UpdateRevision: "some-name-pxc-7d859b6c85", -  UpdateRevision: "some-name-pxc-84b968ccb", +  Value: "caching_sha2_password", -  Value: "mysql_native_password", -  VolumeMode: &"Filesystem", +  VolumeMode: nil, -  VolumeMounts: []v1.VolumeMount{{Name: "datadir", MountPath: "/var/lib/mysql"}},   }    },    },    {    },    },    {    },    }, ""),    },    {    },    },    },    ... // 16 identical fields    ... // 16 identical fields    ... // 22 identical fields    ... // 2 identical fields    ... // 2 identical fields    ... // 2 identical fields    ... // 2 identical fields    ... // 3 identical elements    ... // 3 identical fields    ... // 3 identical fields    ... // 3 identical fields    ... // 4 identical fields    ... // 5 identical fields    ... // 5 identical fields    ... // 5 identical fields    "6",    ... // 6 identical fields    ... // 6 identical fields    ... // 7 identical fields    ... // 8 identical fields    "9",    ... // 9 identical fields    ... 
// 9 identical fields    AccessModes: nil,    ActiveDeadlineSeconds: nil,    Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "haproxy", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},    Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},    Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},    Annotations: map[string]string{    Args: {"haproxy"},    Args: {"mysqld"},    Args: {"proxysql", "-f", "-c", "/etc/proxysql/proxysql.cnf", ...},    AutomountServiceAccountToken: nil,    AWSElasticBlockStore: nil,    AzureFile: nil,    Capacity: nil,    Conditions: nil,    ConfigMapKeyRef: nil,    ConfigMap: &v1.ConfigMapVolumeSource{    ContainerPort: 3306,    ContainerPort: 33060,    ContainerPort: 33062,    ContainerPort: 3307,    ContainerPort: 3309,    ContainerPort: 4444,    ContainerPort: 4567,    ContainerPort: 4568,    ContainerPort: 6032,    ContainerPort: 6070,    ContainerPort: 8404,    Containers: []v1.Container{    DataSource: nil,    DataSourceRef: nil,    DeletionGracePeriodSeconds: nil,    DeletionGracePeriodSeconds: nil,    DeletionTimestamp: nil,    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-haproxy"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-proxysql"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-pxc"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-log-collector"}, Optional: &true}}},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "LIVENESS_CHECK_TIMEOUT", Value: "5"}, {Name: "READINESS_CHECK_TIMEOUT", Value: "1"}},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "OPERATOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "proxyadmin"}}}, ...},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, ...},    Env: []v1.EnvVar{    EphemeralContainers: nil,    FailureThreshold: 3,    FC: nil,    FieldPath: "metadata.name",    FieldPath: "metadata.namespace",    FieldRef: &v1.ObjectFieldSelector{    Finalizers: nil,    Finalizers: nil,    GitRepo: nil,    HostAliases: nil,    HostAliases: 
nil,    HostIP: "",    HostIPC: false,    Hostname: "",    HostPort: 0,    ImagePullPolicy: "Always",    ImagePullSecrets: nil,    InitContainers: []v1.Container{    InitialDelaySeconds: 300,    ISCSI: nil,    Items: nil,    Items: nil,    "kubectl.kubernetes.io/default-container": "haproxy",    "kubectl.kubernetes.io/default-container": "proxysql",    "kubectl.kubernetes.io/default-container": "pxc",    Labels: {"app.kubernetes.io/component": "haproxy", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},    Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},    Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},    Labels: nil,    "last-applied-secret": strings.Join({    Lifecycle: nil,    LivenessProbe: &v1.Probe{    LocalObjectReference: {Name: "auto-some-name-pxc"},    LocalObjectReference: {Name: "some-name-haproxy"},    LocalObjectReference: {Name: "some-name-pxc"},    ManagedFields: nil,    MinReadySeconds: 0,    Name: "auto-config",    {Name: "bin", VolumeSource: {EmptyDir: &{}}},    {Name: "CLUSTER_HASH", Value: "3856030"},    Name: "config",    Name: "DEFAULT_AUTHENTICATION_PLUGIN",    {Name: "haproxy-auto", VolumeSource: {EmptyDir: &{}}},    Name: "haproxy-custom",    Name: "ist",    {Name: "LIVENESS_CHECK_TIMEOUT", Value: "5"},    {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"},    {Name: "MONITOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "monitor"}}},    Name: "mysql",    Name: "mysql-admin",    Name: "mysql-init-file",    {Name: "MYSQL_NOTIFY_SOCKET", Value: "/var/lib/mysql/notify.sock"},    Name: "mysql-replicas",    {Name: "MYSQL_STATE_FILE", Value: "/var/lib/mysql/mysql.state"},    Name: "mysql-users-secret-file",    Name: "mysqlx",    {Name: "OPERATOR_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}},    Name: "POD_NAME",    Name: "POD_NAMESPASE",    Name: "proxyadm",    Name: "proxy-protocol",    {Name: "READINESS_CHECK_TIMEOUT", Value: "15"},    Name: "some-name-env-vars-haproxy",    Namespace: "users-16376",    Name: "ssl",    Name: "ssl-internal",    Name: "sst",    Name: "stats",    {Name: "tmp", VolumeSource: {EmptyDir: &{}}},    Name: "vault-keyring-secret",    Name: "write-set",    {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}},    NFS: nil,    NodeName: "",    NodeSelector: nil,    ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "last-applied-secret": "67177007c63c5dc8f925ec343158b3023ae106fd56533a3706e74c1e78bccd7f", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}},    ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", 
"app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}},    ObjectMeta: {Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "pxc", "last-applied-secret": "16ec17f54a33500281b7e2ead5b4cb6e40fa556412e9cb9b77fe63fc8bce4567", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}},    ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: {Name: "proxydata", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: v1.ObjectMeta{    ObjectMeta: v1.ObjectMeta{    Optional: &false,    Optional: &true,    Optional: &true,    Ordinals: nil,    OS: nil,    Overhead: nil,    OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name", UID: "9a0fee4b-e3d4-4e42-8426-43e3cd1467b7", ...}},    OwnerReferences: nil,    "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e",    "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e",    "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a",    "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a",    "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc",    "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc",    Ports: nil,    Ports: []v1.ContainerPort{    PreemptionPolicy: nil,    ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}},    Quobyte: nil,    ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...},    Replicas: &2,    Replicas: &3,    ResizePolicy: nil,    ResourceFieldRef: nil,    Resources: {},    Resources: {Limits: {s"cpu": {i: {...}, s: "1", Format: "DecimalSI"}, s"memory": {i: {...}, s: "2G", Format: "DecimalSI"}}, Requests: {s"cpu": {i: {...}, s: "100m", Format: "DecimalSI"}, s"memory": {i: {...}, Format: "DecimalSI"}}},    SecretName: "internal-some-name",    SecretName: "some-name-env-vars-haproxy",    SecretName: "some-name-mysql-init",    SecretName: "some-name-ssl",    SecretName: "some-name-ssl-internal",    SecretName: "some-name-vault",    Secret: &v1.SecretVolumeSource{    SecurityContext: nil,    Selector: &{MatchLabels: {"app.kubernetes.io/component": "haproxy", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": 
"percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    SelfLink: "",    ServiceAccountName: "default",    ServiceName: "some-name-haproxy",    ServiceName: "some-name-proxysql-unready",    ServiceName: "some-name-pxc",    SetHostnameAsFQDN: nil,    ShareProcessNamespace: nil,    Spec: v1.PersistentVolumeClaimSpec{    Spec: v1.PodSpec{    Spec: v1.StatefulSetSpec{    StartupProbe: nil,    Status: v1.PersistentVolumeClaimStatus{    Status: v1.StatefulSetStatus{    StorageClassName: nil,    Subdomain: "",    Subdomain: "",    SuccessThreshold: 1,    Template: v1.PodTemplateSpec{    TerminationGracePeriodSeconds: &30,    TerminationGracePeriodSeconds: &600,    TerminationGracePeriodSeconds: nil,    TimeoutSeconds: 5,    Tolerations: {{Key: "node.alpha.kubernetes.io/unreachable", Operator: "Exists", Effect: "NoExecute", TolerationSeconds: &6000}},    Tolerations: nil,    TypeMeta: {},    TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"},    UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}},   &v1.StatefulSet{    Value: "",    ValueFrom: nil,    ValueFrom: &v1.EnvVarSource{    VolumeAttributesClassName: nil,    VolumeClaimTemplates: nil,    VolumeClaimTemplates: []v1.PersistentVolumeClaim{    VolumeDevices: nil,    VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...},    VolumeName: "",    VolumeSource: v1.VolumeSource{    Volumes: []v1.Volume{    VsphereVolume: nil,    WorkingDir: "", + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-16376 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.HNIYYBHqFp ++ mktemp + local LAST_ERR=/tmp/tmp.zqKlaZahCQ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HNIYYBHqFp perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-16376 namespace + cat /tmp/tmp.zqKlaZahCQ + rm /tmp/tmp.HNIYYBHqFp /tmp/tmp.zqKlaZahCQ + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.Ppz30cepbO ++ mktemp + local LAST_ERR=/tmp/tmp.7UZQWX8HwA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Ppz30cepbO No resources found + cat /tmp/tmp.7UZQWX8HwA + rm /tmp/tmp.Ppz30cepbO /tmp/tmp.7UZQWX8HwA + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.dXe9hPTjDL ++ mktemp + local LAST_ERR=/tmp/tmp.13rzaZRizb + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dXe9hPTjDL No resources found + cat /tmp/tmp.13rzaZRizb + rm /tmp/tmp.dXe9hPTjDL 
/tmp/tmp.13rzaZRizb + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.GSfPPiOk5c ++ mktemp + local LAST_ERR=/tmp/tmp.w2k6Evc2zg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.GSfPPiOk5c validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.w2k6Evc2zg + rm /tmp/tmp.GSfPPiOk5c /tmp/tmp.w2k6Evc2zg + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-16376 + rm -rf /tmp/tmp.AxwkEOwxIb + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.UtD0WVy0HQ + desc 'test passed' ++ mktemp + set +o xtrace + local LAST_OUT=/tmp/tmp.l3xivv9Gri ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- ++ mktemp + local LAST_ERR=/tmp/tmp.sLjFWIXrs6 + local exit_status=0 + local LAST_ERR=/tmp/tmp.46D4Yz7Qq3 + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-16376 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator
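
The large base64 payloads in the diff above are values of the percona.com/last-config-hash annotation: each decodes to the JSON of the StatefulSet spec the operator last applied (the common prefix eyJyZXBsaWNhcyI6 decodes to {"replicas":). The log truncates every payload with "...", but the full value can be read off the live object. A minimal sketch for decoding one of them, assuming the annotation sits on the StatefulSet metadata as the diff suggests, and reusing the namespace and resource names from this log; the decode pipeline is illustrative and not part of the test suite:

# Sketch: dump the last-applied spec that the operator hashed.
# "users-16376" and "some-name-pxc" are taken from this log; jq and a
# GNU-style "base64 -d" are assumed to be available.
kubectl -n users-16376 get statefulset some-name-pxc \
  -o jsonpath='{.metadata.annotations.percona\.com/last-config-hash}' \
  | base64 -d | jq .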
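The cleanup phase that follows the diff first clears each PerconaXtraDBCluster's finalizers before deleting the resources; without that patch, kubectl delete pxc --all can hang while the cluster waits for an operator (itself being torn down) to process any remaining finalizers. The pipeline below is copied from the trace above: the first two whitespace-separated columns of the wide listing (NAMESPACE and NAME) become $0 and $1 inside the xargs-spawned shell.

# Copied from the trace: strip finalizers from every pxc custom resource
# so the subsequent "kubectl delete pxc --all" cannot block.
kubectl get pxc --all-namespaces -o wide \
  | grep -v NAMESPACE \
  | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'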
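Every kubectl_bin invocation in this log expands to the same capture-and-retry shape: two mktemp files for stdout and stderr, a "seq 0 2" loop, set +e around the real kubectl call, and a break once the exit status is zero. A hypothetical reconstruction of that helper, built only from the names visible in the trace (LAST_OUT, LAST_ERR, exit_status); the suite's actual function may differ in details such as the back-off between attempts:

# Hypothetical reconstruction: mirrors the xtrace output above, not the
# verbatim source of the test suite's helper.
kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do          # up to three attempts
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        [ "$exit_status" != 0 ] || break   # success: stop retrying
        sleep 1                            # assumed back-off; not shown in the trace
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR"
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}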