Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/logs/users-5-7.log Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 + create_infra users-13708 + local ns=users-13708 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-27473 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.EGfcOyN8ch ++ mktemp + local LAST_ERR=/tmp/tmp.s8UzgzTU9p + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.EGfcOyN8ch perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-27473 namespace + cat /tmp/tmp.s8UzgzTU9p + rm /tmp/tmp.EGfcOyN8ch /tmp/tmp.s8UzgzTU9p + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.u3cpf1BJWP ++ mktemp + local LAST_ERR=/tmp/tmp.gHXtAKaO0P + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.u3cpf1BJWP No resources found + cat /tmp/tmp.gHXtAKaO0P + rm /tmp/tmp.u3cpf1BJWP /tmp/tmp.gHXtAKaO0P + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.dGAwmRUDwA ++ mktemp + local LAST_ERR=/tmp/tmp.MVXm2JYGfh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dGAwmRUDwA No resources found + cat /tmp/tmp.MVXm2JYGfh + rm /tmp/tmp.dGAwmRUDwA /tmp/tmp.MVXm2JYGfh + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' ++ tail -n1 + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were 
provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.FsVLqiSz8m + kubectl_bin get ns ++ mktemp + local LAST_ERR=/tmp/tmp.nFj8IMD73v + local exit_status=0 + awk '{print$1}' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.VB3C1rxQ1e ++ mktemp + local LAST_ERR=/tmp/tmp.Kfg7v2PqfE + local exit_status=0 + xargs kubectl delete ns ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.VB3C1rxQ1e + cat /tmp/tmp.Kfg7v2PqfE + rm /tmp/tmp.VB3C1rxQ1e /tmp/tmp.Kfg7v2PqfE + return 0 namespace "users-27473" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.FsVLqiSz8m namespace "pxc-operator" deleted + cat /tmp/tmp.nFj8IMD73v + rm /tmp/tmp.FsVLqiSz8m /tmp/tmp.nFj8IMD73v + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.q2ZIemdYAk ++ mktemp + local LAST_ERR=/tmp/tmp.VOY7MjkJoT + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.q2ZIemdYAk namespace/pxc-operator created + cat /tmp/tmp.VOY7MjkJoT + rm /tmp/tmp.q2ZIemdYAk /tmp/tmp.VOY7MjkJoT + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.yPLH3geRip +++ mktemp ++ local LAST_ERR=/tmp/tmp.HHcoaScrtP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yPLH3geRip ++ cat /tmp/tmp.HHcoaScrtP ++ rm /tmp/tmp.yPLH3geRip /tmp/tmp.HHcoaScrtP ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2221-842b7a7e-2-cluster8 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.TOsyv8BkMw ++ mktemp + local LAST_ERR=/tmp/tmp.YeW41EJ7QC + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2221-842b7a7e-2-cluster8 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TOsyv8BkMw Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2221-842b7a7e-2-cluster8" modified. 
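Every kubectl_bin call in this trace follows the same retry scaffold: allocate two mktemp files, attempt the kubectl command up to three times (seq 0 2) under set +e, break on success, then cat both capture files, remove them, and propagate the last exit status. A minimal sketch of that wrapper, reconstructed from the trace above; the output redirections and the exact retry condition are assumptions, since xtrace does not display redirections:

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"   # redirection assumed
        exit_status=$?
        set -e
        if [ "$exit_status" != 0 ]; then
            sleep 0   # the trace shows 'sleep 0' between failed attempts
        else
            break
        fi
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR" >&2   # the trace shows both capture files being cat'ed
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}

This is why failed calls in this log appear three times before the harness gives up, and why a bare "+ :" often follows a "return 1": callers that tolerate the failure discard the non-zero status with ':'.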
+ cat /tmp/tmp.YeW41EJ7QC + rm /tmp/tmp.TOsyv8BkMw /tmp/tmp.YeW41EJ7QC + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.FeRNe4CFhh ++ mktemp + local LAST_ERR=/tmp/tmp.H1qc9ykgz8 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.FeRNe4CFhh customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.H1qc9ykgz8 + rm /tmp/tmp.FeRNe4CFhh /tmp/tmp.H1qc9ykgz8 + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/deploy/cw-rbac.yaml + kubectl_bin apply -f - + sed -e 's^namespace: .*^namespace: pxc-operator^' ++ mktemp + local LAST_OUT=/tmp/tmp.G19Hs8YRaP ++ mktemp + local LAST_ERR=/tmp/tmp.XA91TLMrAY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.G19Hs8YRaP clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.XA91TLMrAY + rm /tmp/tmp.G19Hs8YRaP /tmp/tmp.XA91TLMrAY + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/deploy/cw-operator.yaml + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2221-842b7a7e^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.avRA8I5BgD ++ mktemp + local LAST_ERR=/tmp/tmp.WyWYZS5FRV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.avRA8I5BgD deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.WyWYZS5FRV + rm /tmp/tmp.avRA8I5BgD /tmp/tmp.WyWYZS5FRV + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.BT58myTAXb ++ mktemp + local LAST_ERR=/tmp/tmp.Qv98avRlEa + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BT58myTAXb pod/percona-xtradb-cluster-operator-6d956cbb4b-7ftng condition met + cat /tmp/tmp.Qv98avRlEa + rm /tmp/tmp.BT58myTAXb /tmp/tmp.Qv98avRlEa + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.hwSfdo96Um +++ mktemp ++ local LAST_ERR=/tmp/tmp.xyfyXxzCdc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hwSfdo96Um ++ cat /tmp/tmp.xyfyXxzCdc ++ rm /tmp/tmp.hwSfdo96Um /tmp/tmp.xyfyXxzCdc ++ return 0 + wait_pod percona-xtradb-cluster-operator-6d956cbb4b-7ftng 480 pxc-operator + local pod=percona-xtradb-cluster-operator-6d956cbb4b-7ftng + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-6d956cbb4b-7ftng ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-6d956cbb4b-7ftng condition met waiting for pod/percona-xtradb-cluster-operator-6d956cbb4b-7ftng to become Ready.Ok + sleep 3 + create_namespace users-13708 + local namespace=users-13708 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ awk '-F ' '{print $2}' ++ tail -n1 ++ helm list --all-namespaces --filter chaos-mesh ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep validate-auth + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v 
'^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces users-13708' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-13708 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-13708 + kubectl_bin get ns ++ mktemp ++ mktemp + xargs kubectl delete ns + local LAST_OUT=/tmp/tmp.qsdcAAZ33j ++ mktemp + local LAST_OUT=/tmp/tmp.fLM6FJrUsO + local LAST_ERR=/tmp/tmp.n5YZU9rojc + local exit_status=0 ++ mktemp ++ seq 0 2 + local LAST_ERR=/tmp/tmp.TblmOWC9F5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + for i in '$(seq 0 2)' + kubectl delete namespace users-13708 + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qsdcAAZ33j + cat /tmp/tmp.n5YZU9rojc + rm /tmp/tmp.qsdcAAZ33j /tmp/tmp.n5YZU9rojc + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-13708 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-13708 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.fLM6FJrUsO + cat /tmp/tmp.TblmOWC9F5 Error from server (NotFound): namespaces "users-13708" not found + rm /tmp/tmp.fLM6FJrUsO /tmp/tmp.TblmOWC9F5 + return 1 + : + wait_for_delete namespace/users-13708 + local res=namespace/users-13708 + echo -n 'waiting for namespace/users-13708 to be deleted' waiting for namespace/users-13708 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "users-13708" not found + desc 'create namespace users-13708' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-13708 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-13708 ++ mktemp + local LAST_OUT=/tmp/tmp.55QCv5cY7f ++ mktemp + local LAST_ERR=/tmp/tmp.DnQViwv3X9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-13708 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.55QCv5cY7f namespace/users-13708 created + cat /tmp/tmp.DnQViwv3X9 + rm /tmp/tmp.55QCv5cY7f /tmp/tmp.DnQViwv3X9 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.jzqfzpB1uy +++ mktemp ++ local LAST_ERR=/tmp/tmp.W20B85bP5s ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jzqfzpB1uy ++ cat /tmp/tmp.W20B85bP5s ++ rm /tmp/tmp.jzqfzpB1uy /tmp/tmp.W20B85bP5s ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2221-842b7a7e-2-cluster8 --namespace=users-13708 ++ mktemp + local LAST_OUT=/tmp/tmp.lr2VK1ekGu ++ mktemp + local LAST_ERR=/tmp/tmp.jRhzurqwL4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2221-842b7a7e-2-cluster8 --namespace=users-13708 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lr2VK1ekGu Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2221-842b7a7e-2-cluster8" modified. 
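The failed "kubectl delete namespace users-13708" attempts just above are expected on a clean cluster: the namespace does not exist yet, the wrapper retries three times, returns 1, and the caller discards the status ("+ :"). The recurring "error: resource(s) were provided, but no name was specified" lines have the same cause: kubectl delete is invoked, directly or through xargs, with a grep/awk-filtered name list that turned out to be empty. A hedged sketch of the namespace-recycling helpers implied by the trace (function names follow the log; the polling interval is an assumption):

# Delete every non-system namespace; the filter is copied from the trace.
# Destructive by design, intended only for disposable CI clusters.
clean_old_namespaces() {
    kubectl get ns \
        | egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' \
        | awk '{print $1}' \
        | xargs --no-run-if-empty kubectl delete ns
    # Without --no-run-if-empty, an empty list still runs 'kubectl delete ns'
    # and yields the "no name was specified" error seen throughout this log.
}

# Block until the API server answers NotFound for the given resource.
wait_for_delete() {
    local res=$1
    echo -n "waiting for $res to be deleted"
    until kubectl get "$res" 2>&1 | grep -q NotFound; do
        echo -n .
        sleep 1
    done
    echo
}

create_namespace then chains delete, wait_for_delete, and create, matching the order visible above for both pxc-operator and users-13708.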
+ cat /tmp/tmp.jRhzurqwL4 + rm /tmp/tmp.lr2VK1ekGu /tmp/tmp.jRhzurqwL4 + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.7bbH2Ws21P ++ mktemp + local LAST_ERR=/tmp/tmp.TwcQbm8T0H + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7bbH2Ws21P secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.TwcQbm8T0H + rm /tmp/tmp.7bbH2Ws21P /tmp/tmp.TwcQbm8T0H + return 0 + desc 'create PXC cluster with 1-password secret' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster with 1-password secret ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/conf/some-name.yml '' '' /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/conf/secrets_one_pass.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/conf/secrets_one_pass.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/conf/secrets_one_pass.yml ++ mktemp + local LAST_OUT=/tmp/tmp.j0WsfSUGP5 ++ mktemp + local LAST_ERR=/tmp/tmp.XrIFF0JbtH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/conf/secrets_one_pass.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.j0WsfSUGP5 secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.XrIFF0JbtH + rm /tmp/tmp.j0WsfSUGP5 /tmp/tmp.XrIFF0JbtH + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + 
/usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_OUT=/tmp/tmp.zwMfaiLxWK + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-13708~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2221-842b7a7e#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ mktemp + local LAST_ERR=/tmp/tmp.XEp2DOsIUr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zwMfaiLxWK deployment.apps/pxc-client created + cat /tmp/tmp.XEp2DOsIUr + rm /tmp/tmp.zwMfaiLxWK /tmp/tmp.XEp2DOsIUr + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-13708~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_OUT=/tmp/tmp.c4z8Laeqnm + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2221-842b7a7e#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/conf/some-name.yml + local LAST_ERR=/tmp/tmp.ypAmYqiHrL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.c4z8Laeqnm perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.ypAmYqiHrL + rm /tmp/tmp.c4z8Laeqnm /tmp/tmp.ypAmYqiHrL + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ 
mktemp +++ local LAST_OUT=/tmp/tmp.nXn8J0B2Y9 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.QLQwdxRUTu +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.nXn8J0B2Y9 +++ cat /tmp/tmp.QLQwdxRUTu +++ rm /tmp/tmp.nXn8J0B2Y9 /tmp/tmp.QLQwdxRUTu +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.IZ0qlw9of7 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.gtJwDTZOlw +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.IZ0qlw9of7 +++ cat /tmp/tmp.gtJwDTZOlw +++ rm /tmp/tmp.IZ0qlw9of7 /tmp/tmp.gtJwDTZOlw +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-13708 ++ mktemp + local LAST_OUT=/tmp/tmp.Jx5oDCJ7aP ++ mktemp + local LAST_ERR=/tmp/tmp.wHcFfWbayS + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-13708 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-13708 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-13708 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.Jx5oDCJ7aP + cat /tmp/tmp.wHcFfWbayS error: no matching resources found + rm /tmp/tmp.Jx5oDCJ7aP /tmp/tmp.wHcFfWbayS + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo 
some-name-pxc-0 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.F1cXyshdDx +++ mktemp ++ local LAST_ERR=/tmp/tmp.8uVzMgEVjH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.F1cXyshdDx ++ cat /tmp/tmp.8uVzMgEVjH ++ rm /tmp/tmp.F1cXyshdDx /tmp/tmp.8uVzMgEVjH ++ return 0 + local 'root_pass=[O&{0IVEI2P99M#,v~3' + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xS5FlHxgzZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.sZRJda2so9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xS5FlHxgzZ ++ cat /tmp/tmp.sZRJda2so9 Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.xS5FlHxgzZ /tmp/tmp.sZRJda2so9 ++ return 0 + [[ -n '' ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''[O&{0IVEI2P99M#,v~3'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -p'\''[O&{0IVEI2P99M#,v~3'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.px2Ln9RSUL +++ mktemp ++ local LAST_ERR=/tmp/tmp.DbLY4Tb1ND ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.px2Ln9RSUL ++ cat /tmp/tmp.DbLY4Tb1ND ++ rm /tmp/tmp.px2Ln9RSUL 
/tmp/tmp.DbLY4Tb1ND ++ return 0 + client_pod=pxc-client-857d976497-m2vcp + wait_pod pxc-client-857d976497-m2vcp + local pod=pxc-client-857d976497-m2vcp + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-m2vcp ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-m2vcp condition met waiting for pod/pxc-client-857d976497-m2vcp to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''[O&{0IVEI2P99M#,v~3'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -p'\''[O&{0IVEI2P99M#,v~3'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WNdZCPCIS8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.R9nP38rl8g ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WNdZCPCIS8 ++ cat /tmp/tmp.R9nP38rl8g ++ rm /tmp/tmp.WNdZCPCIS8 /tmp/tmp.R9nP38rl8g ++ return 0 + client_pod=pxc-client-857d976497-m2vcp + wait_pod pxc-client-857d976497-m2vcp + local pod=pxc-client-857d976497-m2vcp + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-m2vcp ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-m2vcp condition met waiting for pod/pxc-client-857d976497-m2vcp to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''[O&{0IVEI2P99M#,v~3'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''[O&{0IVEI2P99M#,v~3'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''[O&{0IVEI2P99M#,v~3'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''[O&{0IVEI2P99M#,v~3'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Pi6z9ArmR3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.3HLFLOvfo0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Pi6z9ArmR3 ++ cat /tmp/tmp.3HLFLOvfo0 ++ rm /tmp/tmp.Pi6z9ArmR3 /tmp/tmp.3HLFLOvfo0 ++ return 0 + client_pod=pxc-client-857d976497-m2vcp + wait_pod pxc-client-857d976497-m2vcp + local pod=pxc-client-857d976497-m2vcp + local max_retry=480 + local ns= ++ echo 
pxc-client-857d976497-m2vcp ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-m2vcp condition met waiting for pod/pxc-client-857d976497-m2vcp to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.3yLEg1h0KG/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-1.sql /tmp/tmp.3yLEg1h0KG/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''[O&{0IVEI2P99M#,v~3'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''[O&{0IVEI2P99M#,v~3'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''[O&{0IVEI2P99M#,v~3'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''[O&{0IVEI2P99M#,v~3'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3ViPmmeqfO +++ mktemp ++ local LAST_ERR=/tmp/tmp.aoWEpCPVLE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3ViPmmeqfO ++ cat /tmp/tmp.aoWEpCPVLE ++ rm /tmp/tmp.3ViPmmeqfO /tmp/tmp.aoWEpCPVLE ++ return 0 + client_pod=pxc-client-857d976497-m2vcp + wait_pod pxc-client-857d976497-m2vcp + local pod=pxc-client-857d976497-m2vcp + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-m2vcp ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-m2vcp condition met waiting for pod/pxc-client-857d976497-m2vcp to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.3yLEg1h0KG/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-1.sql /tmp/tmp.3yLEg1h0KG/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''[O&{0IVEI2P99M#,v~3'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''[O&{0IVEI2P99M#,v~3'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''[O&{0IVEI2P99M#,v~3'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''[O&{0IVEI2P99M#,v~3'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.c8We1BkUJG +++ mktemp ++ local LAST_ERR=/tmp/tmp.cK8DfvtGh1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.c8We1BkUJG ++ cat /tmp/tmp.cK8DfvtGh1 ++ rm /tmp/tmp.c8We1BkUJG /tmp/tmp.cK8DfvtGh1 ++ return 0 + client_pod=pxc-client-857d976497-m2vcp + wait_pod pxc-client-857d976497-m2vcp + local pod=pxc-client-857d976497-m2vcp + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-m2vcp ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-m2vcp condition met waiting for pod/pxc-client-857d976497-m2vcp to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.3yLEg1h0KG/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-1.sql /tmp/tmp.3yLEg1h0KG/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.flt5DvaTA1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.NmYr6mRf5u ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.flt5DvaTA1 ++ cat /tmp/tmp.NmYr6mRf5u Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.flt5DvaTA1 /tmp/tmp.NmYr6mRf5u ++ return 0 + '[' '' ']' + desc 'test missing passwords were created and present in internal secrets' + set +o xtrace ----------------------------------------------------------------------------------- test missing passwords were created and present in internal secrets ----------------------------------------------------------------------------------- + empty_pwds=() + wrong_pwds=() + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking root' Checking root ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.zfdThxIVGp +++ mktemp ++ local LAST_ERR=/tmp/tmp.QXrHb16Gwi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zfdThxIVGp ++ cat /tmp/tmp.QXrHb16Gwi ++ rm /tmp/tmp.zfdThxIVGp /tmp/tmp.QXrHb16Gwi ++ return 0 + secret_pass='[O&{0IVEI2P99M#,v~3' ++ getSecretData internal-some-name root ++ local secretName=internal-some-name ++ local dataKey=root ++ kubectl_bin get secrets/internal-some-name '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.uHaKaIqdDn +++ mktemp ++ local LAST_ERR=/tmp/tmp.xVTNXGAQxc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uHaKaIqdDn ++ cat /tmp/tmp.xVTNXGAQxc ++ rm /tmp/tmp.uHaKaIqdDn /tmp/tmp.xVTNXGAQxc ++ return 0 + int_secret_pass='[O&{0IVEI2P99M#,v~3' + [[ -z [O&{0IVEI2P99M#,v~3 ]] + [[ [O&{0IVEI2P99M#,v~3 != \[\O\&\{\0\I\V\E\I\2\P\9\9\M\#\,\v\~\3 ]] + [[ root != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ root ]] + [[ '' =~ root ]] + echo 'Running compare for root' Running compare for root + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''[O&{0IVEI2P99M#,v~3'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''[O&{0IVEI2P99M#,v~3'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' 
'-h some-name-proxysql -uroot -p'\''[O&{0IVEI2P99M#,v~3'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''[O&{0IVEI2P99M#,v~3'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4Xcew02Vmp +++ mktemp ++ local LAST_ERR=/tmp/tmp.QA73jFiPfg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4Xcew02Vmp ++ cat /tmp/tmp.QA73jFiPfg ++ rm /tmp/tmp.4Xcew02Vmp /tmp/tmp.QA73jFiPfg ++ return 0 + client_pod=pxc-client-857d976497-m2vcp + wait_pod pxc-client-857d976497-m2vcp + local pod=pxc-client-857d976497-m2vcp + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-m2vcp ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-m2vcp condition met waiting for pod/pxc-client-857d976497-m2vcp to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.3yLEg1h0KG/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4.sql /tmp/tmp.3yLEg1h0KG/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking xtrabackup' Checking xtrabackup ++ getSecretData my-cluster-secrets xtrabackup ++ local secretName=my-cluster-secrets ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.5oT5ROF9mv +++ mktemp ++ local LAST_ERR=/tmp/tmp.neYg4xImvC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5oT5ROF9mv ++ cat /tmp/tmp.neYg4xImvC ++ rm /tmp/tmp.5oT5ROF9mv /tmp/tmp.neYg4xImvC ++ return 0 + secret_pass='GG(#V7VVhVYcJE32&6}' ++ getSecretData internal-some-name xtrabackup ++ local secretName=internal-some-name ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.vmhhLiPJ5N +++ mktemp ++ local LAST_ERR=/tmp/tmp.3vtEMZV7mW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vmhhLiPJ5N ++ cat /tmp/tmp.3vtEMZV7mW ++ rm /tmp/tmp.vmhhLiPJ5N /tmp/tmp.3vtEMZV7mW ++ return 0 + int_secret_pass='GG(#V7VVhVYcJE32&6}' + [[ -z GG(#V7VVhVYcJE32&6} ]] + [[ GG(#V7VVhVYcJE32&6} != \G\G\(\#\V\7\V\V\h\V\Y\c\J\E\3\2\&\6\} ]] + [[ xtrabackup != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ xtrabackup ]] + [[ '' =~ xtrabackup ]] + echo 'Running compare for xtrabackup' Running compare for xtrabackup + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''GG(#V7VVhVYcJE32&6}'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''GG(#V7VVhVYcJE32&6}'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''GG(#V7VVhVYcJE32&6}'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''GG(#V7VVhVYcJE32&6}'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7ZvMHFaro0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.l9cNF40XS0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7ZvMHFaro0 ++ cat /tmp/tmp.l9cNF40XS0 ++ rm /tmp/tmp.7ZvMHFaro0 /tmp/tmp.l9cNF40XS0 ++ return 0 + client_pod=pxc-client-857d976497-m2vcp + wait_pod pxc-client-857d976497-m2vcp + local pod=pxc-client-857d976497-m2vcp + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-m2vcp ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-m2vcp condition met waiting for pod/pxc-client-857d976497-m2vcp to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.3yLEg1h0KG/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4.sql /tmp/tmp.3yLEg1h0KG/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking monitor' Checking monitor ++ getSecretData my-cluster-secrets monitor ++ local secretName=my-cluster-secrets ++ local dataKey=monitor ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.4AvwAe99GO +++ mktemp ++ local LAST_ERR=/tmp/tmp.PUm4drGzog ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4AvwAe99GO ++ cat /tmp/tmp.PUm4drGzog ++ rm /tmp/tmp.4AvwAe99GO /tmp/tmp.PUm4drGzog ++ return 0 + secret_pass=monitor_password ++ getSecretData internal-some-name monitor ++ local secretName=internal-some-name ++ local dataKey=monitor ++ kubectl_bin get secrets/internal-some-name '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.EtPgFAzzvB +++ mktemp ++ local LAST_ERR=/tmp/tmp.J18Gz9O7SH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EtPgFAzzvB ++ cat /tmp/tmp.J18Gz9O7SH ++ rm /tmp/tmp.EtPgFAzzvB /tmp/tmp.J18Gz9O7SH ++ return 0 + int_secret_pass=monitor_password + [[ -z monitor_password ]] + [[ monitor_password != \m\o\n\i\t\o\r\_\p\a\s\s\w\o\r\d ]] + [[ monitor != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ monitor ]] + [[ '' =~ monitor ]] + echo 'Running compare for monitor' Running compare for monitor + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local postfix= + local 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nZCaRslhJr +++ mktemp ++ local LAST_ERR=/tmp/tmp.yxkKj1Md27 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nZCaRslhJr ++ cat /tmp/tmp.yxkKj1Md27 ++ rm /tmp/tmp.nZCaRslhJr /tmp/tmp.yxkKj1Md27 ++ return 0 + client_pod=pxc-client-857d976497-m2vcp + wait_pod pxc-client-857d976497-m2vcp + local pod=pxc-client-857d976497-m2vcp + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-m2vcp ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-m2vcp condition met waiting for pod/pxc-client-857d976497-m2vcp to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.3yLEg1h0KG/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4.sql /tmp/tmp.3yLEg1h0KG/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking proxyadmin' Checking proxyadmin ++ getSecretData my-cluster-secrets proxyadmin ++ local secretName=my-cluster-secrets ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.quJvEeCrof +++ mktemp ++ local LAST_ERR=/tmp/tmp.APBJF4Tkbu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.quJvEeCrof ++ cat /tmp/tmp.APBJF4Tkbu ++ rm /tmp/tmp.quJvEeCrof /tmp/tmp.APBJF4Tkbu ++ return 0 + secret_pass='.a#Tx4DhYm,cxL<7]A' ++ getSecretData internal-some-name proxyadmin ++ local secretName=internal-some-name ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.K19a5JvRcz +++ mktemp ++ local LAST_ERR=/tmp/tmp.X4XXkApPBW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.K19a5JvRcz ++ cat /tmp/tmp.X4XXkApPBW ++ rm /tmp/tmp.K19a5JvRcz /tmp/tmp.X4XXkApPBW ++ return 0 + int_secret_pass='.a#Tx4DhYm,cxL<7]A' + [[ -z .a#Tx4DhYm,cxL<7]A ]] + [[ .a#Tx4DhYm,cxL<7]A != \.\a\#\T\x\4\D\h\Y\m\,\c\x\L\<\7\]\A ]] + [[ proxyadmin != \p\r\o\x\y\a\d\m\i\n ]] + [[ proxyadmin == \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ proxyadmin ]] + [[ '' =~ proxyadmin ]] + echo 'Running compare for proxyadmin' Running compare for proxyadmin 
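The per-user block above verifies that each password in the public secret (my-cluster-secrets) matches its copy in the operator-managed internal secret (internal-some-name). The long backslash runs such as \[\O\&\{... are not corruption: the right-hand side of != inside [[ ]] is a glob pattern, so the harness escapes every character to force a literal comparison; quoting the variable achieves the same. A simplified sketch of the check (getSecretData follows the trace exactly, minus the retry wrapper; the bookkeeping around empty_pwds/wrong_pwds is condensed):

getSecretData() {
    local secretName=$1 dataKey=$2
    # Secret values are base64-encoded; decode to recover the plaintext.
    kubectl get "secrets/$secretName" "--template={{.data.$dataKey}}" | base64 --decode
}

empty_pwds=()
wrong_pwds=()
for user in root xtrabackup monitor proxyadmin operator replication; do
    echo "Checking $user"
    secret_pass=$(getSecretData my-cluster-secrets "$user")
    int_secret_pass=$(getSecretData internal-some-name "$user")
    if [[ -z $secret_pass ]]; then
        empty_pwds+=("$user")
    elif [[ $int_secret_pass != "$secret_pass" ]]; then   # quoted: literal match
        wrong_pwds+=("$user")
    fi
done

Note that proxyadmin is the one user that cannot log in through MySQL, so its login check goes through the ProxySQL admin interface on 127.0.0.1:6032 inside the proxysql container (the compare_mysql_cmd_local call below) rather than through some-name-proxysql:3306.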
+ compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''.a#Tx4DhYm,cxL<7]A'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''.a#Tx4DhYm,cxL<7]A'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''.a#Tx4DhYm,cxL<7]A'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''.a#Tx4DhYm,cxL<7]A'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.3yLEg1h0KG/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-2.sql /tmp/tmp.3yLEg1h0KG/select-2.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking operator' Checking operator ++ getSecretData my-cluster-secrets operator ++ local secretName=my-cluster-secrets ++ local dataKey=operator ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.eolBlctRvO +++ mktemp ++ local LAST_ERR=/tmp/tmp.NGmUaLiUdM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eolBlctRvO ++ cat /tmp/tmp.NGmUaLiUdM ++ rm /tmp/tmp.eolBlctRvO /tmp/tmp.NGmUaLiUdM ++ return 0 + secret_pass='d#S$&d[s#W*F+8OdS' ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.Aix7rSJepn +++ mktemp ++ local LAST_ERR=/tmp/tmp.R1ARh1LwpH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Aix7rSJepn ++ cat /tmp/tmp.R1ARh1LwpH ++ rm /tmp/tmp.Aix7rSJepn /tmp/tmp.R1ARh1LwpH ++ return 0 + int_secret_pass='d#S$&d[s#W*F+8OdS' + [[ -z d#S$&d[s#W*F+8OdS ]] + [[ d#S$&d[s#W*F+8OdS != \d\#\S\$\&\d\[\s\#\W\*\F\+\8\O\d\S ]] + [[ operator != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ operator ]] + [[ '' =~ operator ]] + echo 'Running compare for operator' Running compare for operator + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''d#S$&d[s#W*F+8OdS'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''d#S$&d[s#W*F+8OdS'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''d#S$&d[s#W*F+8OdS'\''' + 
local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''d#S$&d[s#W*F+8OdS'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6FkMtvGujx +++ mktemp ++ local LAST_ERR=/tmp/tmp.QdEWD5zgXa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6FkMtvGujx ++ cat /tmp/tmp.QdEWD5zgXa ++ rm /tmp/tmp.6FkMtvGujx /tmp/tmp.QdEWD5zgXa ++ return 0 + client_pod=pxc-client-857d976497-m2vcp + wait_pod pxc-client-857d976497-m2vcp + local pod=pxc-client-857d976497-m2vcp + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-m2vcp ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-m2vcp condition met waiting for pod/pxc-client-857d976497-m2vcp to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.3yLEg1h0KG/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4.sql /tmp/tmp.3yLEg1h0KG/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking replication' Checking replication ++ getSecretData my-cluster-secrets replication ++ local secretName=my-cluster-secrets ++ local dataKey=replication ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.3ZtHCJmnkY +++ mktemp ++ local LAST_ERR=/tmp/tmp.EjoUP7c8Ra ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3ZtHCJmnkY ++ cat /tmp/tmp.EjoUP7c8Ra ++ rm /tmp/tmp.3ZtHCJmnkY /tmp/tmp.EjoUP7c8Ra ++ return 0 + secret_pass='cnUT%c(KpMQx3,wxGD1' ++ getSecretData internal-some-name replication ++ local secretName=internal-some-name ++ local dataKey=replication ++ kubectl_bin get secrets/internal-some-name '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.SSB1DhHeBk +++ mktemp ++ local LAST_ERR=/tmp/tmp.OWtaCFScUG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SSB1DhHeBk ++ cat /tmp/tmp.OWtaCFScUG ++ rm /tmp/tmp.SSB1DhHeBk /tmp/tmp.OWtaCFScUG ++ return 0 + int_secret_pass='cnUT%c(KpMQx3,wxGD1' + [[ -z cnUT%c(KpMQx3,wxGD1 ]] + [[ cnUT%c(KpMQx3,wxGD1 != \c\n\U\T\%\c\(\K\p\M\Q\x\3\,\w\x\G\D\1 ]] + [[ replication != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ replication ]] + [[ '' =~ replication ]] + echo 'Running compare for replication' Running compare for replication + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''cnUT%c(KpMQx3,wxGD1'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''cnUT%c(KpMQx3,wxGD1'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''cnUT%c(KpMQx3,wxGD1'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''cnUT%c(KpMQx3,wxGD1'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9BUMidzYlx +++ mktemp ++ local LAST_ERR=/tmp/tmp.ovWFnTvmsi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9BUMidzYlx ++ cat /tmp/tmp.ovWFnTvmsi ++ rm /tmp/tmp.9BUMidzYlx /tmp/tmp.ovWFnTvmsi ++ return 0 + client_pod=pxc-client-857d976497-m2vcp + wait_pod pxc-client-857d976497-m2vcp + local pod=pxc-client-857d976497-m2vcp + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-m2vcp ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-m2vcp condition met waiting for pod/pxc-client-857d976497-m2vcp to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.3yLEg1h0KG/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4.sql /tmp/tmp.3yLEg1h0KG/select-4.sql + [[ -n '' ]] + [[ -n '' ]] + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.MKEHNzw9EP ++ mktemp + local LAST_ERR=/tmp/tmp.oh3gst90lp + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.MKEHNzw9EP secret/my-cluster-secrets patched + cat /tmp/tmp.oh3gst90lp + rm /tmp/tmp.MKEHNzw9EP /tmp/tmp.oh3gst90lp + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.COiBPCfDom +++ mktemp ++ local 
LAST_ERR=/tmp/tmp.fqR23FzsMl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.COiBPCfDom ++ cat /tmp/tmp.fqR23FzsMl ++ rm /tmp/tmp.COiBPCfDom /tmp/tmp.fqR23FzsMl ++ return 0 + client_pod=pxc-client-857d976497-m2vcp + wait_pod pxc-client-857d976497-m2vcp + local pod=pxc-client-857d976497-m2vcp + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-m2vcp ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-m2vcp condition met waiting for pod/pxc-client-857d976497-m2vcp to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.3yLEg1h0KG/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4.sql /tmp/tmp.3yLEg1h0KG/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Ppqh7n9Z6j ++ mktemp + local LAST_ERR=/tmp/tmp.YeCWIBYNdb + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Ppqh7n9Z6j perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.YeCWIBYNdb + rm /tmp/tmp.Ppqh7n9Z6j /tmp/tmp.YeCWIBYNdb + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OVsEMoOr4b +++ mktemp ++ local LAST_ERR=/tmp/tmp.dTvz48d3Vl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OVsEMoOr4b ++ cat /tmp/tmp.dTvz48d3Vl ++ rm /tmp/tmp.OVsEMoOr4b /tmp/tmp.dTvz48d3Vl ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aX0ibn5kyC +++ mktemp ++ local LAST_ERR=/tmp/tmp.UeOPqQz4fD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aX0ibn5kyC ++ cat /tmp/tmp.UeOPqQz4fD ++ rm /tmp/tmp.aX0ibn5kyC /tmp/tmp.UeOPqQz4fD ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.hTFzNWj0PV ++++++ mktemp +++++ local 
LAST_ERR=/tmp/tmp.mq7z6T5aUV +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.hTFzNWj0PV +++++ cat /tmp/tmp.mq7z6T5aUV +++++ rm /tmp/tmp.hTFzNWj0PV /tmp/tmp.mq7z6T5aUV +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.80AUCe943G ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.MMXtFWm5v6 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.80AUCe943G +++++ cat /tmp/tmp.MMXtFWm5v6 +++++ rm /tmp/tmp.80AUCe943G /tmp/tmp.MMXtFWm5v6 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xfHi4GyRsZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.V9oBe3zAzq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xfHi4GyRsZ ++ cat /tmp/tmp.V9oBe3zAzq ++ rm /tmp/tmp.xfHi4GyRsZ /tmp/tmp.V9oBe3zAzq ++ return 0 + [[ 3 == \3 ]] + echo + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.VYgYceW32A ++ mktemp + local LAST_ERR=/tmp/tmp.yFK02CXAIv + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.VYgYceW32A secret/my-cluster-secrets patched + cat /tmp/tmp.yFK02CXAIv + rm /tmp/tmp.VYgYceW32A /tmp/tmp.yFK02CXAIv + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OZYPP3NQPY +++ mktemp ++ local LAST_ERR=/tmp/tmp.vTrlbEbFjI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OZYPP3NQPY ++ cat /tmp/tmp.vTrlbEbFjI ++ rm /tmp/tmp.OZYPP3NQPY /tmp/tmp.vTrlbEbFjI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
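-----------------------------------------------------------------------------------
note: patch_secret, as used for the proxyadmin rotation above
-----------------------------------------------------------------------------------
Every rotation in this test patches a single key of my-cluster-secrets with a base64 value; dGVzdC1wYXNzd29yZA== decodes to test-password. A sketch of patch_secret as it appears in the trace, again without the kubectl_bin wrapper:

patch_secret() {
    local secret=$1
    local key=$2
    local value=$3
    # strategic-merge patch touching only one data key
    kubectl patch secret "${secret}" -p="{\"data\":{\"${key}\": \"${value}\"}}"
}

# usage mirroring the log; the operator then reconciles the user behind the scenes
patch_secret my-cluster-secrets proxyadmin "$(echo -n test-password | base64)"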
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.H6aNegdmgP +++ mktemp ++ local LAST_ERR=/tmp/tmp.UOqsxUEyD7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.H6aNegdmgP ++ cat /tmp/tmp.UOqsxUEyD7 ++ rm /tmp/tmp.H6aNegdmgP /tmp/tmp.UOqsxUEyD7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.neqnBd3bud +++ mktemp ++ local LAST_ERR=/tmp/tmp.PhIa3DTqxW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.neqnBd3bud ++ cat /tmp/tmp.PhIa3DTqxW ++ rm /tmp/tmp.neqnBd3bud /tmp/tmp.PhIa3DTqxW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HBbi8K1lVz +++ mktemp ++ local LAST_ERR=/tmp/tmp.SmcpBxSxi0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HBbi8K1lVz ++ cat /tmp/tmp.SmcpBxSxi0 ++ rm /tmp/tmp.HBbi8K1lVz /tmp/tmp.SmcpBxSxi0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QGWU9aCQ6c +++ mktemp ++ local LAST_ERR=/tmp/tmp.Uq3kAZYgrd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QGWU9aCQ6c ++ cat /tmp/tmp.Uq3kAZYgrd ++ rm /tmp/tmp.QGWU9aCQ6c /tmp/tmp.Uq3kAZYgrd ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AKQKaeIqDL +++ mktemp ++ local LAST_ERR=/tmp/tmp.L8kKmPxDoL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AKQKaeIqDL ++ cat /tmp/tmp.L8kKmPxDoL ++ rm /tmp/tmp.AKQKaeIqDL /tmp/tmp.L8kKmPxDoL ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.a4YKKmHVPz ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ZApmtQTbul +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.a4YKKmHVPz +++++ cat /tmp/tmp.ZApmtQTbul +++++ rm /tmp/tmp.a4YKKmHVPz /tmp/tmp.ZApmtQTbul +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.focqcexbSG ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.lE2kccQaah +++++ local exit_status=0 ++++++ seq 0 2 +++++ 
for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.focqcexbSG +++++ cat /tmp/tmp.lE2kccQaah +++++ rm /tmp/tmp.focqcexbSG /tmp/tmp.lE2kccQaah +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.f7oEiF7Wpz +++ mktemp ++ local LAST_ERR=/tmp/tmp.SDabMIW0BB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.f7oEiF7Wpz ++ cat /tmp/tmp.SDabMIW0BB ++ rm /tmp/tmp.f7oEiF7Wpz /tmp/tmp.SDabMIW0BB ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.3yLEg1h0KG/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-2.sql /tmp/tmp.3yLEg1h0KG/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.3yLEg1h0KG/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-2.sql /tmp/tmp.3yLEg1h0KG/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.3yLEg1h0KG/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-2.sql /tmp/tmp.3yLEg1h0KG/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.QGG5KNpIj0 ++ mktemp + local LAST_ERR=/tmp/tmp.fKDs1ScQ5y + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QGG5KNpIj0 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.fKDs1ScQ5y + rm /tmp/tmp.QGG5KNpIj0 /tmp/tmp.fKDs1ScQ5y + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.XZixkurMcV ++ mktemp + local LAST_ERR=/tmp/tmp.kE9JoT3c4A + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XZixkurMcV secret/my-cluster-secrets patched + cat /tmp/tmp.kE9JoT3c4A + rm /tmp/tmp.XZixkurMcV /tmp/tmp.kE9JoT3c4A + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4c8JWykXb2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.QzVo6ecV4C ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set 
-e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4c8JWykXb2 ++ cat /tmp/tmp.QzVo6ecV4C ++ rm /tmp/tmp.4c8JWykXb2 /tmp/tmp.QzVo6ecV4C ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Zj97mpI0zI +++ mktemp ++ local LAST_ERR=/tmp/tmp.9Ax3tHSv45 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Zj97mpI0zI ++ cat /tmp/tmp.9Ax3tHSv45 ++ rm /tmp/tmp.Zj97mpI0zI /tmp/tmp.9Ax3tHSv45 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MI05Od5QQG +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ygm9DtKdKw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MI05Od5QQG ++ cat /tmp/tmp.Ygm9DtKdKw ++ rm /tmp/tmp.MI05Od5QQG /tmp/tmp.Ygm9DtKdKw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WM3Q1yDquq +++ mktemp ++ local LAST_ERR=/tmp/tmp.ERXsDlK4ob ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WM3Q1yDquq ++ cat /tmp/tmp.ERXsDlK4ob ++ rm /tmp/tmp.WM3Q1yDquq /tmp/tmp.ERXsDlK4ob ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zIaJWWqa5A +++ mktemp ++ local LAST_ERR=/tmp/tmp.eUuu4fRi4v ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zIaJWWqa5A ++ cat /tmp/tmp.eUuu4fRi4v ++ rm /tmp/tmp.zIaJWWqa5A /tmp/tmp.eUuu4fRi4v ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aX6FGPbqgc +++ mktemp ++ local LAST_ERR=/tmp/tmp.XPNjfMAn4V ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aX6FGPbqgc ++ cat /tmp/tmp.XPNjfMAn4V ++ rm /tmp/tmp.aX6FGPbqgc /tmp/tmp.XPNjfMAn4V ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JgmHB7VSUX +++ mktemp ++ local LAST_ERR=/tmp/tmp.9aVc4CheFB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JgmHB7VSUX ++ cat /tmp/tmp.9aVc4CheFB ++ rm /tmp/tmp.JgmHB7VSUX /tmp/tmp.9aVc4CheFB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
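-----------------------------------------------------------------------------------
note: the resize that triggered the wait above
-----------------------------------------------------------------------------------
The 'test xtrabackup' step first scales ProxySQL from 3 down to 2 with a merge patch and only then rotates the password, which is why the wait in progress here expects 3 ready PXC members but only 2 ready ProxySQL members. The two calls, verbatim from the trace, minus the retry wrapper:

# resize the proxysql statefulset via the PXC custom resource
kubectl patch pxc some-name --type=merge -p '{"spec":{"proxysql":{"size":2}}}'
# rotate the xtrabackup password (base64 of "test-password")
kubectl patch secret my-cluster-secrets -p='{"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}'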
[ 17 near-identical wait iterations elided: the loop keeps polling {.status.state} through kubectl_bin every 5 s, prints one '.' per pass, and the guard counter runs 6 -> 22 with the state still "initializing" ]
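-----------------------------------------------------------------------------------
note: the shape of wait_cluster_consistency
-----------------------------------------------------------------------------------
The iterations elided above all come from wait_cluster_consistency: poll {.status.state} every 5 seconds, print a dot, give up after max passes, then verify the ready counts. A sketch reconstructed from the trace; the timeout message is an assumption (the log never reaches it), and the real loop re-enters on any failed check rather than only on state:

wait_cluster_consistency() {
    local cluster_name=$1
    local cluster_size=$2
    local proxy_size=$3
    local i=0
    local max=300
    sleep 7
    echo -n "waiting for pxc/${cluster_name} to be ready"
    until [[ $(kubectl get pxc "${cluster_name}" -o 'jsonpath={.status.state}') == "ready" ]]; do
        echo -n .
        sleep 5
        if [[ $i -ge $max ]]; then
            echo "timeout waiting for pxc/${cluster_name}"  # assumption
            return 1
        fi
        let i+=1
    done
    [[ $(kubectl get pxc "${cluster_name}" -o 'jsonpath={.status.pxc.ready}') == "${cluster_size}" ]] || return 1
    [[ $(kubectl get pxc "${cluster_name}" -o 'jsonpath={.status.proxysql.ready}') == "${proxy_size}" ]] || return 1
}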
.+ sleep 5 + [[ 23 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8v9QDLCbns +++ mktemp ++ local LAST_ERR=/tmp/tmp.ttkXHm1ack ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8v9QDLCbns ++ cat /tmp/tmp.ttkXHm1ack ++ rm /tmp/tmp.8v9QDLCbns /tmp/tmp.ttkXHm1ack ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wJ4fIfxism +++ mktemp ++ local LAST_ERR=/tmp/tmp.kUFsh3pPhj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wJ4fIfxism ++ cat /tmp/tmp.kUFsh3pPhj ++ rm /tmp/tmp.wJ4fIfxism /tmp/tmp.kUFsh3pPhj ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.GMu8xdndhT ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.mkJjbJ5RzU +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.GMu8xdndhT +++++ cat /tmp/tmp.mkJjbJ5RzU +++++ rm /tmp/tmp.GMu8xdndhT /tmp/tmp.mkJjbJ5RzU +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.hTPai1YR9I ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.pESXN7VUxD +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.hTPai1YR9I +++++ cat /tmp/tmp.pESXN7VUxD +++++ rm /tmp/tmp.hTPai1YR9I /tmp/tmp.pESXN7VUxD +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZNEGsFiXmu +++ mktemp ++ local LAST_ERR=/tmp/tmp.gS2ZbkTwdM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZNEGsFiXmu ++ cat /tmp/tmp.gS2ZbkTwdM ++ rm /tmp/tmp.ZNEGsFiXmu /tmp/tmp.gS2ZbkTwdM ++ return 0 + [[ 2 == \2 ]] + echo + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 
'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' -s /tmp/tmp.3yLEg1h0KG/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-3.sql /tmp/tmp.3yLEg1h0KG/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.h8e81FdWtA ++ mktemp + local LAST_ERR=/tmp/tmp.vIBoAezcoL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.h8e81FdWtA secret/my-cluster-secrets patched + cat /tmp/tmp.vIBoAezcoL + rm /tmp/tmp.h8e81FdWtA /tmp/tmp.vIBoAezcoL + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.67og5IX765 +++ mktemp ++ local LAST_ERR=/tmp/tmp.81CcEMKaJb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.67og5IX765 ++ cat /tmp/tmp.81CcEMKaJb ++ rm /tmp/tmp.67og5IX765 /tmp/tmp.81CcEMKaJb ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + echo 'Skipping dual password feature doesn'\''t work for 5.7. PXC 5.7 doesn'\''t support it!' Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it! + return + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ehGr36FSbQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.yzTnyhw2QM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ehGr36FSbQ ++ cat /tmp/tmp.yzTnyhw2QM ++ rm /tmp/tmp.ehGr36FSbQ /tmp/tmp.yzTnyhw2QM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
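-----------------------------------------------------------------------------------
note: why the monitor rotation skips password propagation on 5.7
-----------------------------------------------------------------------------------
wait_for_password_propagation is traced above but bails out immediately: dual passwords (ALTER USER ... RETAIN CURRENT PASSWORD) only exist since MySQL 8.0.14, so there is nothing to verify on a 5.7 image. A sketch of the guard; IMAGE_PXC is an assumed variable name, as the log only shows the image tag being matched against 5\.7:

wait_for_password_propagation() {
    local secret=$1
    local user=$2
    local max_retry=240
    local root_pass
    root_pass=$(getSecretData "${secret}" root)
    if [[ ${IMAGE_PXC} =~ 5\.7 ]]; then
        echo "Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it!"
        return
    fi
    # the 8.0+ branch (checking the old password during propagation, up to max_retry
    # attempts) is never exercised in this log
}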
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jAcWQwWz5q +++ mktemp ++ local LAST_ERR=/tmp/tmp.m8jZMdTfcE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jAcWQwWz5q ++ cat /tmp/tmp.m8jZMdTfcE ++ rm /tmp/tmp.jAcWQwWz5q /tmp/tmp.m8jZMdTfcE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UiJp4r3F2N +++ mktemp ++ local LAST_ERR=/tmp/tmp.w3GX69Rpby ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UiJp4r3F2N ++ cat /tmp/tmp.w3GX69Rpby ++ rm /tmp/tmp.UiJp4r3F2N /tmp/tmp.w3GX69Rpby ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pK3gCmuosb +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZGz9TJvds7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pK3gCmuosb ++ cat /tmp/tmp.ZGz9TJvds7 ++ rm /tmp/tmp.pK3gCmuosb /tmp/tmp.ZGz9TJvds7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vTf4P7eXpY +++ mktemp ++ local LAST_ERR=/tmp/tmp.iR12lH2XqO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vTf4P7eXpY ++ cat /tmp/tmp.iR12lH2XqO ++ rm /tmp/tmp.vTf4P7eXpY /tmp/tmp.iR12lH2XqO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0eHbgy13IS +++ mktemp ++ local LAST_ERR=/tmp/tmp.qirQLND1MY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0eHbgy13IS ++ cat /tmp/tmp.qirQLND1MY ++ rm /tmp/tmp.0eHbgy13IS /tmp/tmp.qirQLND1MY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WViVRN7MjH +++ mktemp ++ local LAST_ERR=/tmp/tmp.U9wtCK00cL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WViVRN7MjH ++ cat /tmp/tmp.U9wtCK00cL ++ rm /tmp/tmp.WViVRN7MjH /tmp/tmp.U9wtCK00cL ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9tW43a4Uu1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.i3yuCdeq6G ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9tW43a4Uu1 ++ cat /tmp/tmp.i3yuCdeq6G ++ rm /tmp/tmp.9tW43a4Uu1 /tmp/tmp.i3yuCdeq6G ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ycr3L3P33x ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.fKUcXgrVJ3 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ycr3L3P33x +++++ cat /tmp/tmp.fKUcXgrVJ3 +++++ rm /tmp/tmp.ycr3L3P33x /tmp/tmp.fKUcXgrVJ3 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.fvAEx81Hmf ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.wEGJKnNOId +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.fvAEx81Hmf +++++ cat /tmp/tmp.wEGJKnNOId +++++ rm /tmp/tmp.fvAEx81Hmf /tmp/tmp.wEGJKnNOId +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZYUbmjYibh +++ mktemp ++ local LAST_ERR=/tmp/tmp.tr6lbz2iC0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZYUbmjYibh ++ cat /tmp/tmp.tr6lbz2iC0 ++ rm /tmp/tmp.ZYUbmjYibh /tmp/tmp.tr6lbz2iC0 ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW 
TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4jRJWqMHuD +++ mktemp ++ local LAST_ERR=/tmp/tmp.bxQMRSXqrA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4jRJWqMHuD ++ cat /tmp/tmp.bxQMRSXqrA ++ rm /tmp/tmp.4jRJWqMHuD /tmp/tmp.bxQMRSXqrA ++ return 0 + client_pod=pxc-client-857d976497-m2vcp + wait_pod pxc-client-857d976497-m2vcp + local pod=pxc-client-857d976497-m2vcp + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-m2vcp ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-m2vcp condition met waiting for pod/pxc-client-857d976497-m2vcp to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.3yLEg1h0KG/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4.sql /tmp/tmp.3yLEg1h0KG/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.vKmZkGWBuo ++ mktemp + local LAST_ERR=/tmp/tmp.gCFMgZNn1d + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.vKmZkGWBuo secret/my-cluster-secrets patched + cat /tmp/tmp.gCFMgZNn1d + rm /tmp/tmp.vKmZkGWBuo /tmp/tmp.gCFMgZNn1d + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JHf3GDEEN8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.axIYKnYnZL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JHf3GDEEN8 ++ cat /tmp/tmp.axIYKnYnZL ++ rm /tmp/tmp.JHf3GDEEN8 /tmp/tmp.axIYKnYnZL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
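-----------------------------------------------------------------------------------
note: how compare_mysql_cmd validates a rotated password
-----------------------------------------------------------------------------------
Each compare above is the same move: run a query through the pxc-client pod with the new credentials and diff the output against a fixture, preferring a version-specific file (select-4-57.sql and so on) when one exists. A sketch from the trace; run_mysql's body is hidden behind 'set +o xtrace', so the kubectl exec / mysql invocation below is an assumption, and test_dir / tmp_dir stand in for the /mnt/jenkins/... and /tmp/tmp.3yLEg1h0KG paths seen in the log:

run_mysql() {
    local command=$1
    local uri=$2
    local client_pod
    client_pod=$(kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}')
    # assumption: the harness execs the mysql client inside the pxc-client pod
    kubectl exec "${client_pod}" -- sh -c "mysql -sN ${uri} -e \"${command}\""
}

compare_mysql_cmd() {
    local command_id=$1
    local command=$2
    local uri=$3
    local expected_result="${test_dir}/compare/${command_id}.sql"
    # prefer a version-specific fixture when the image is 5.7 and one exists
    if [[ ${IMAGE_PXC} =~ 5\.7 ]] && [[ -f ${expected_result%.sql}-57.sql ]]; then
        expected_result="${expected_result%.sql}-57.sql"
    fi
    run_mysql "${command}" "${uri}" >"${tmp_dir}/${command_id}.sql"
    diff -u "${expected_result}" "${tmp_dir}/${command_id}.sql"
}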
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Dokg3IlmLg +++ mktemp ++ local LAST_ERR=/tmp/tmp.03X2pKDE8S ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Dokg3IlmLg ++ cat /tmp/tmp.03X2pKDE8S ++ rm /tmp/tmp.Dokg3IlmLg /tmp/tmp.03X2pKDE8S ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.apYv7K4Pq7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.WHxLplarZg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.apYv7K4Pq7 ++ cat /tmp/tmp.WHxLplarZg ++ rm /tmp/tmp.apYv7K4Pq7 /tmp/tmp.WHxLplarZg ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.TcQX4aMxT4 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.mHDntKLOPS +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.TcQX4aMxT4 +++++ cat /tmp/tmp.mHDntKLOPS +++++ rm /tmp/tmp.TcQX4aMxT4 /tmp/tmp.mHDntKLOPS +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ciuFi2hyEZ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.7Uutm4GWjI +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ciuFi2hyEZ +++++ cat /tmp/tmp.7Uutm4GWjI +++++ rm /tmp/tmp.ciuFi2hyEZ /tmp/tmp.7Uutm4GWjI +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1EndsXbxeJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.FEF35khBew ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1EndsXbxeJ ++ cat /tmp/tmp.FEF35khBew ++ rm /tmp/tmp.1EndsXbxeJ /tmp/tmp.FEF35khBew ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW 
TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0sfYNTApEL +++ mktemp ++ local LAST_ERR=/tmp/tmp.ftOvvI1KWO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0sfYNTApEL ++ cat /tmp/tmp.ftOvvI1KWO ++ rm /tmp/tmp.0sfYNTApEL /tmp/tmp.ftOvvI1KWO ++ return 0 + client_pod=pxc-client-857d976497-m2vcp + wait_pod pxc-client-857d976497-m2vcp + local pod=pxc-client-857d976497-m2vcp + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-m2vcp ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-m2vcp condition met waiting for pod/pxc-client-857d976497-m2vcp to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.3yLEg1h0KG/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4.sql /tmp/tmp.3yLEg1h0KG/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.XLNv0JQ8L5 ++ mktemp + local LAST_ERR=/tmp/tmp.FOzwBg2m9w + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XLNv0JQ8L5 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.FOzwBg2m9w + rm /tmp/tmp.XLNv0JQ8L5 /tmp/tmp.FOzwBg2m9w + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PFWnjIVKju +++ mktemp ++ local LAST_ERR=/tmp/tmp.2X045pR3ES ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PFWnjIVKju ++ cat /tmp/tmp.2X045pR3ES ++ rm /tmp/tmp.PFWnjIVKju /tmp/tmp.2X045pR3ES ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
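-----------------------------------------------------------------------------------
note: the secretsName switch above
-----------------------------------------------------------------------------------
Repointing spec.secretsName at a different Secret makes the operator pick up (and, for a not-yet-existing Secret, generate) a fresh set of system-user passwords, which is why the cluster drops back to "initializing" for the long wait that follows. The patch as traced, minus the retry wrapper:

# switch the cluster to a new secrets object, then wait for 3 PXC / 2 ProxySQL ready members
kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}'
sleep 30
wait_cluster_consistency some-name 3 2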
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n .
[18 near-verbatim poll iterations (i=0 through 17) condensed: each pass re-runs the same kubectl_bin jsonpath query through fresh mktemp capture files, reads 'initializing', prints '.', and sleeps 5s]
.+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hazLrrVuKJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.r2Qz4L4v1k ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hazLrrVuKJ ++ cat /tmp/tmp.r2Qz4L4v1k ++ rm /tmp/tmp.hazLrrVuKJ /tmp/tmp.r2Qz4L4v1k ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.m2rPlKJ12B +++ mktemp ++ local LAST_ERR=/tmp/tmp.OXcgy4DEoP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.m2rPlKJ12B ++ cat /tmp/tmp.OXcgy4DEoP ++ rm /tmp/tmp.m2rPlKJ12B /tmp/tmp.OXcgy4DEoP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FA8D99jC0E +++ mktemp ++ local LAST_ERR=/tmp/tmp.I3oHEEE4nE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FA8D99jC0E ++ cat /tmp/tmp.I3oHEEE4nE ++ rm /tmp/tmp.FA8D99jC0E /tmp/tmp.I3oHEEE4nE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5zYX7nJoYp +++ mktemp ++ local LAST_ERR=/tmp/tmp.hez7Vy6equ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5zYX7nJoYp ++ cat /tmp/tmp.hez7Vy6equ ++ rm /tmp/tmp.5zYX7nJoYp /tmp/tmp.hez7Vy6equ ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.o9Be18zTQY +++ mktemp ++ local LAST_ERR=/tmp/tmp.gooEF1xRtH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.o9Be18zTQY ++ cat /tmp/tmp.gooEF1xRtH ++ rm /tmp/tmp.o9Be18zTQY /tmp/tmp.gooEF1xRtH ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.9MKJ8OgMtq ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.SzHZyQZrNn +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.9MKJ8OgMtq +++++ cat /tmp/tmp.SzHZyQZrNn +++++ rm /tmp/tmp.9MKJ8OgMtq /tmp/tmp.SzHZyQZrNn +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.cZZeWEHmCy ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.dKYVNemPbO +++++ local exit_status=0 ++++++ seq 0 2 
+++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.cZZeWEHmCy +++++ cat /tmp/tmp.dKYVNemPbO +++++ rm /tmp/tmp.cZZeWEHmCy /tmp/tmp.dKYVNemPbO +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LXghxt6LGj +++ mktemp ++ local LAST_ERR=/tmp/tmp.bvkOmktqhs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LXghxt6LGj ++ cat /tmp/tmp.bvkOmktqhs ++ rm /tmp/tmp.LXghxt6LGj /tmp/tmp.bvkOmktqhs ++ return 0 + [[ 2 == \2 ]] + echo + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.0FH4hoxvYv ++ mktemp + local LAST_ERR=/tmp/tmp.O2tJsLkMyE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0FH4hoxvYv secret/my-cluster-secrets-2 patched + cat /tmp/tmp.O2tJsLkMyE + rm /tmp/tmp.0FH4hoxvYv /tmp/tmp.O2tJsLkMyE + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.t0UvW3YaHr +++ mktemp ++ local LAST_ERR=/tmp/tmp.CmprpSmdNx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.t0UvW3YaHr ++ cat /tmp/tmp.CmprpSmdNx ++ rm /tmp/tmp.t0UvW3YaHr /tmp/tmp.CmprpSmdNx ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ePaS6bv40i +++ mktemp ++ local LAST_ERR=/tmp/tmp.h2yCrCMpPC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ePaS6bv40i ++ cat /tmp/tmp.h2yCrCMpPC ++ rm /tmp/tmp.ePaS6bv40i /tmp/tmp.h2yCrCMpPC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
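[annotation] Every kubectl call in this run goes through the same wrapper, visible as the recurring mktemp/LAST_OUT/LAST_ERR/`seq 0 2` boilerplate: stdout and stderr are captured to temp files, the command is attempted up to three times, and the captured streams are replayed on exit. A sketch of the shape the trace implies (any back-off between failed attempts is not visible here, because every call in this log succeeds on the first try):

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0 i
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do              # up to three attempts
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            [ "$exit_status" != 0 ] || break  # stop retrying on success
        done
        cat "$LAST_OUT"                       # replay stdout for callers that capture it
        cat "$LAST_ERR" >&2
        rm -f "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }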
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sgSRnEuIfz +++ mktemp ++ local LAST_ERR=/tmp/tmp.1tJdak271a ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sgSRnEuIfz ++ cat /tmp/tmp.1tJdak271a ++ rm /tmp/tmp.sgSRnEuIfz /tmp/tmp.1tJdak271a ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.I9Z36YM5aZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.GKHQJtSylz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.I9Z36YM5aZ ++ cat /tmp/tmp.GKHQJtSylz ++ rm /tmp/tmp.I9Z36YM5aZ /tmp/tmp.GKHQJtSylz ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uYCnf0n3ju +++ mktemp ++ local LAST_ERR=/tmp/tmp.8wGMgfCY4h ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uYCnf0n3ju ++ cat /tmp/tmp.8wGMgfCY4h ++ rm /tmp/tmp.uYCnf0n3ju /tmp/tmp.8wGMgfCY4h ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Gw6N9bQMvw ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.hVFRr771z8 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Gw6N9bQMvw +++++ cat /tmp/tmp.hVFRr771z8 +++++ rm /tmp/tmp.Gw6N9bQMvw /tmp/tmp.hVFRr771z8 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.RW3wrnhKXA ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.P0RWYUuzqc +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.RW3wrnhKXA +++++ cat /tmp/tmp.P0RWYUuzqc +++++ rm /tmp/tmp.RW3wrnhKXA /tmp/tmp.P0RWYUuzqc +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wiUEHs9cdn +++ mktemp ++ local LAST_ERR=/tmp/tmp.P5cS7MrXNT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wiUEHs9cdn ++ cat /tmp/tmp.P5cS7MrXNT ++ rm /tmp/tmp.wiUEHs9cdn /tmp/tmp.P5cS7MrXNT ++ return 0 + [[ 2 == \2 ]] + echo + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 
'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yb9NcA5ba8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.pnsDHDMOSW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yb9NcA5ba8 ++ cat /tmp/tmp.pnsDHDMOSW ++ rm /tmp/tmp.yb9NcA5ba8 /tmp/tmp.pnsDHDMOSW ++ return 0 + client_pod=pxc-client-857d976497-m2vcp + wait_pod pxc-client-857d976497-m2vcp + local pod=pxc-client-857d976497-m2vcp + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-m2vcp ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-m2vcp condition met waiting for pod/pxc-client-857d976497-m2vcp to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.3yLEg1h0KG/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4.sql /tmp/tmp.3yLEg1h0KG/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.Hnf9DXaMFP +++ mktemp ++ local LAST_ERR=/tmp/tmp.3qYPzuut1P ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Hnf9DXaMFP ++ cat /tmp/tmp.3qYPzuut1P ++ rm /tmp/tmp.Hnf9DXaMFP /tmp/tmp.3qYPzuut1P ++ return 0 + newpass='2p]o{G5K~0ad%yEvpaG' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''2p]o{G5K~0ad%yEvpaG'\'';' '-h some-name-pxc -uroot -p'\''2p]o{G5K~0ad%yEvpaG'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''2p]o{G5K~0ad%yEvpaG'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''2p]o{G5K~0ad%yEvpaG'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AJKtYhVpxQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.MV1448WOKK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat 
/tmp/tmp.AJKtYhVpxQ ++ cat /tmp/tmp.MV1448WOKK ++ rm /tmp/tmp.AJKtYhVpxQ /tmp/tmp.MV1448WOKK ++ return 0 + client_pod=pxc-client-857d976497-m2vcp + wait_pod pxc-client-857d976497-m2vcp + local pod=pxc-client-857d976497-m2vcp + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-m2vcp ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-m2vcp condition met waiting for pod/pxc-client-857d976497-m2vcp to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''2p]o{G5K~0ad%yEvpaG'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''2p]o{G5K~0ad%yEvpaG'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''2p]o{G5K~0ad%yEvpaG'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''2p]o{G5K~0ad%yEvpaG'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.coajh3iAR5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.RkRgGL7NCg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.coajh3iAR5 ++ cat /tmp/tmp.RkRgGL7NCg ++ rm /tmp/tmp.coajh3iAR5 /tmp/tmp.RkRgGL7NCg ++ return 0 + client_pod=pxc-client-857d976497-m2vcp + wait_pod pxc-client-857d976497-m2vcp + local pod=pxc-client-857d976497-m2vcp + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-m2vcp ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-m2vcp condition met waiting for pod/pxc-client-857d976497-m2vcp to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.3yLEg1h0KG/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4.sql /tmp/tmp.3yLEg1h0KG/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.lhsiA5GigP +++ mktemp ++ local LAST_ERR=/tmp/tmp.fDr61hD8E4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lhsiA5GigP ++ cat /tmp/tmp.fDr61hD8E4 ++ rm /tmp/tmp.lhsiA5GigP /tmp/tmp.fDr61hD8E4 ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.Ntl44HGGiq ++ mktemp + local LAST_ERR=/tmp/tmp.gL4Q1CoQsw + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Ntl44HGGiq secret/my-cluster-secrets-2 configured + cat /tmp/tmp.gL4Q1CoQsw Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
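[annotation] Passwords are read back from Kubernetes secrets with a Go-template lookup plus base64 decode; in this stretch it recovers the generated root password and then confirms the internal secret's operator key still holds test-password2. The helper, as the trace shows it:

    getSecretData() {
        local secretName=$1 dataKey=$2
        kubectl_bin get "secrets/${secretName}" \
            "--template={{.data.${dataKey}}}" | base64 --decode
    }
    # usage mirrored from this log:
    #   newpass=$(getSecretData my-cluster-secrets-2 root)
    #   pass=$(getSecretData internal-some-name operator)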
+ rm /tmp/tmp.Ntl44HGGiq /tmp/tmp.gL4Q1CoQsw + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fj8K7N2lgu +++ mktemp ++ local LAST_ERR=/tmp/tmp.F7rEtROYcU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fj8K7N2lgu ++ cat /tmp/tmp.F7rEtROYcU ++ rm /tmp/tmp.fj8K7N2lgu /tmp/tmp.F7rEtROYcU ++ return 0 + client_pod=pxc-client-857d976497-m2vcp + wait_pod pxc-client-857d976497-m2vcp + local pod=pxc-client-857d976497-m2vcp + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-m2vcp ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-m2vcp condition met waiting for pod/pxc-client-857d976497-m2vcp to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.3yLEg1h0KG/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-4.sql /tmp/tmp.3yLEg1h0KG/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/conf/some-name.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2221-842b7a7e#' + local LAST_OUT=/tmp/tmp.PWiwZ8vI8s + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-13708~ + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_ERR=/tmp/tmp.owAg9ODAC7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PWiwZ8vI8s perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.owAg9ODAC7 + rm /tmp/tmp.PWiwZ8vI8s /tmp/tmp.owAg9ODAC7 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0ZRK20A1oM +++ mktemp ++ local LAST_ERR=/tmp/tmp.QBCRaE9M9N ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0ZRK20A1oM ++ cat /tmp/tmp.QBCRaE9M9N ++ rm /tmp/tmp.0ZRK20A1oM /tmp/tmp.QBCRaE9M9N ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
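[annotation] The reconfiguration step above pipes the stock CR manifest through a chain of sed substitutions so every image field points at the images under test before the result reaches `kubectl apply -f -`. A condensed sketch using the exact expressions from this run (the real helper presumably derives the tags from environment variables rather than hard-coding them, and applies a few more substitutions of the same shape for the backup, logcollector, pmm, and minio-namespace fields):

    cat_config() {
        cat "$1" \
            | /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
            | /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' \
            | /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2221-842b7a7e#' \
            | /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' \
            | /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' \
            | /usr/bin/sed -e 's#apply:.*#apply: Never#'
    }
    apply_config() { cat_config "$1" | kubectl_bin apply -f -; }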
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LZDh5V4Ngg +++ mktemp ++ local LAST_ERR=/tmp/tmp.t8rXWyoV1w ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LZDh5V4Ngg ++ cat /tmp/tmp.t8rXWyoV1w ++ rm /tmp/tmp.LZDh5V4Ngg /tmp/tmp.t8rXWyoV1w ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tOyfOzGAHS +++ mktemp ++ local LAST_ERR=/tmp/tmp.tae4v7R9C1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tOyfOzGAHS ++ cat /tmp/tmp.tae4v7R9C1 ++ rm /tmp/tmp.tOyfOzGAHS /tmp/tmp.tae4v7R9C1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IXA3lzU2Cj +++ mktemp ++ local LAST_ERR=/tmp/tmp.j662ukHAVF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IXA3lzU2Cj ++ cat /tmp/tmp.j662ukHAVF ++ rm /tmp/tmp.IXA3lzU2Cj /tmp/tmp.j662ukHAVF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1iOn0D8Vf5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.VtFr7AlXns ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1iOn0D8Vf5 ++ cat /tmp/tmp.VtFr7AlXns ++ rm /tmp/tmp.1iOn0D8Vf5 /tmp/tmp.VtFr7AlXns ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cO4wtMpl0o +++ mktemp ++ local LAST_ERR=/tmp/tmp.PQCH8Hoz10 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cO4wtMpl0o ++ cat /tmp/tmp.PQCH8Hoz10 ++ rm /tmp/tmp.cO4wtMpl0o /tmp/tmp.PQCH8Hoz10 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.71tV1adeQA +++ mktemp ++ local LAST_ERR=/tmp/tmp.OVfOb4nPqX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.71tV1adeQA ++ cat /tmp/tmp.OVfOb4nPqX ++ rm /tmp/tmp.71tV1adeQA /tmp/tmp.OVfOb4nPqX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
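[annotation] Each '.' printed in this stretch is one pass of the readiness loop: read .status.state, compare against 'ready', sleep 5s, give up after 300 passes. Reassembled from the trace (a sketch; the exact timeout handling is not exercised in this log):

    wait_cluster_consistency() {
        local cluster_name=$1 cluster_size=$2 proxy_size=$3
        local i=0 max=300
        sleep 7
        echo -n "waiting for pxc/${cluster_name} to be ready"
        while [[ $(kubectl_bin get pxc "$cluster_name" \
                -o 'jsonpath={.status.state}') != "ready" ]]; do
            echo -n .
            sleep 5
            if [[ $i -ge $max ]]; then
                echo " timeout"; return 1   # assumption: not reached in this run
            fi
            let i+=1
        done
        # once ready, the replica counts must match what the CR asked for
        [[ $(kubectl_bin get pxc "$cluster_name" \
                -o 'jsonpath={.status.pxc.ready}') == "$cluster_size" ]]
        # proxy readiness comes from .status.haproxy.ready or
        # .status.proxysql.ready, depending on get_proxy_engine (see below)
    }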
.+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n .
[36 near-verbatim poll iterations (i=6 through 41) condensed: the state stays 'initializing' while the cluster reconciles the reconfigured CR; each pass re-runs the same kubectl_bin jsonpath query and sleeps 5s]
.+ sleep 5 + [[ 42 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qzmRLH4eGl +++ mktemp ++ local LAST_ERR=/tmp/tmp.aeLEQSd9wQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qzmRLH4eGl ++ cat /tmp/tmp.aeLEQSd9wQ ++ rm /tmp/tmp.qzmRLH4eGl /tmp/tmp.aeLEQSd9wQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 43 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rIBt39ggH0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.VgSm4dfWL4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rIBt39ggH0 ++ cat /tmp/tmp.VgSm4dfWL4 ++ rm /tmp/tmp.rIBt39ggH0 /tmp/tmp.VgSm4dfWL4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 44 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9h2AH9pDyQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.hnOIpLU3Ec ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9h2AH9pDyQ ++ cat /tmp/tmp.hnOIpLU3Ec ++ rm /tmp/tmp.9h2AH9pDyQ /tmp/tmp.hnOIpLU3Ec ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 45 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.E5hKOlwucH +++ mktemp ++ local LAST_ERR=/tmp/tmp.1EAmPvKEmV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.E5hKOlwucH ++ cat /tmp/tmp.1EAmPvKEmV ++ rm /tmp/tmp.E5hKOlwucH /tmp/tmp.1EAmPvKEmV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 46 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GnsXJLfxZX +++ mktemp ++ local LAST_ERR=/tmp/tmp.eqlvi93GfD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GnsXJLfxZX ++ cat /tmp/tmp.eqlvi93GfD ++ rm /tmp/tmp.GnsXJLfxZX /tmp/tmp.eqlvi93GfD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 47 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8UpLaY5xVs +++ mktemp ++ local LAST_ERR=/tmp/tmp.v1q2yHlE5n ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8UpLaY5xVs ++ cat /tmp/tmp.v1q2yHlE5n ++ rm /tmp/tmp.8UpLaY5xVs /tmp/tmp.v1q2yHlE5n ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
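[annotation] A few more passes and the state flips to ready; the readiness check below then routes through haproxy rather than proxysql, because the CR applied above enabled haproxy. The detection helpers, as their xtrace output shows them (the final fallback branch is an assumption, since neither-proxy clusters do not occur in this run):

    get_proxy() {
        local target_cluster=$1
        if [[ $(kubectl_bin get pxc "$target_cluster" \
                -o 'jsonpath={.spec.haproxy.enabled}') == "true" ]]; then
            echo "${target_cluster}-haproxy"
            return
        fi
        if [[ $(kubectl_bin get pxc "$target_cluster" \
                -o 'jsonpath={.spec.proxysql.enabled}') == "true" ]]; then
            echo "${target_cluster}-proxysql"
            return
        fi
        echo "${target_cluster}-pxc"   # assumption: fallback when no proxy is enabled
    }
    get_proxy_engine() {
        local cluster_proxy
        cluster_proxy=$(get_proxy "$1")
        echo "${cluster_proxy##*-}"    # strips the cluster name, leaving haproxy/proxysql
    }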
.+ sleep 5 + [[ 48 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.idAxNeOafI +++ mktemp ++ local LAST_ERR=/tmp/tmp.Wgxsu313er ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.idAxNeOafI ++ cat /tmp/tmp.Wgxsu313er ++ rm /tmp/tmp.idAxNeOafI /tmp/tmp.Wgxsu313er ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 49 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Gd3ug16kBG +++ mktemp ++ local LAST_ERR=/tmp/tmp.4XTdsnvwy6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Gd3ug16kBG ++ cat /tmp/tmp.4XTdsnvwy6 ++ rm /tmp/tmp.Gd3ug16kBG /tmp/tmp.4XTdsnvwy6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 50 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TKrRCqX7JO +++ mktemp ++ local LAST_ERR=/tmp/tmp.EIG4tLlC4G ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TKrRCqX7JO ++ cat /tmp/tmp.EIG4tLlC4G ++ rm /tmp/tmp.TKrRCqX7JO /tmp/tmp.EIG4tLlC4G ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 51 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YVAW8lqvOR +++ mktemp ++ local LAST_ERR=/tmp/tmp.WoxboPNA6U ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YVAW8lqvOR ++ cat /tmp/tmp.WoxboPNA6U ++ rm /tmp/tmp.YVAW8lqvOR /tmp/tmp.WoxboPNA6U ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rXDFpKYkAJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.y1obd2GF2e ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rXDFpKYkAJ ++ cat /tmp/tmp.y1obd2GF2e ++ rm /tmp/tmp.rXDFpKYkAJ /tmp/tmp.y1obd2GF2e ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.So6RPfswgV ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.bbOl5DFwn1 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.So6RPfswgV +++++ cat /tmp/tmp.bbOl5DFwn1 +++++ rm /tmp/tmp.So6RPfswgV /tmp/tmp.bbOl5DFwn1 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WSQ2fRzLHi +++ 
mktemp ++ local LAST_ERR=/tmp/tmp.xwFqBiM33Q ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WSQ2fRzLHi ++ cat /tmp/tmp.xwFqBiM33Q ++ rm /tmp/tmp.WSQ2fRzLHi /tmp/tmp.xwFqBiM33Q ++ return 0 + [[ 3 == \3 ]] + echo + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + check_generation 2 haproxy some-name + local generation=2 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TMO6x2az8q +++ mktemp ++ local LAST_ERR=/tmp/tmp.QXWatxh1D0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TMO6x2az8q ++ cat /tmp/tmp.QXWatxh1D0 ++ rm /tmp/tmp.TMO6x2az8q /tmp/tmp.QXWatxh1D0 ++ return 0 + current_generation=2 + [[ 2 != \2 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.yCG2nHWv59 ++ mktemp + local LAST_ERR=/tmp/tmp.AQAtHUtcan + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.yCG2nHWv59 secret/my-cluster-secrets patched + cat /tmp/tmp.AQAtHUtcan + rm /tmp/tmp.yCG2nHWv59 /tmp/tmp.AQAtHUtcan + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Fgu54RJPMf +++ mktemp ++ local LAST_ERR=/tmp/tmp.mvIMp5a8YM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Fgu54RJPMf ++ cat /tmp/tmp.mvIMp5a8YM ++ rm /tmp/tmp.Fgu54RJPMf /tmp/tmp.mvIMp5a8YM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sqo5wHq5jN +++ mktemp ++ local LAST_ERR=/tmp/tmp.mHVkmOgr2z ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sqo5wHq5jN ++ cat /tmp/tmp.mHVkmOgr2z ++ rm /tmp/tmp.sqo5wHq5jN /tmp/tmp.mHVkmOgr2z ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
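The patch_secret step above rotates the monitor password by writing a base64 value directly into the Secret's .data map; dGVzdC1wYXNzd29yZDI= is simply test-password2 encoded, the same credential used for the -umonitor login check further down. Outside the suite the equivalent is plain kubectl:

  # encode the new password and patch the existing key in place
  value=$(printf '%s' 'test-password2' | base64)    # -> dGVzdC1wYXNzd29yZDI=
  kubectl patch secret my-cluster-secrets -p "{\"data\":{\"monitor\":\"${value}\"}}"

  # confirm what is now stored
  kubectl get secret my-cluster-secrets -o 'jsonpath={.data.monitor}' | base64 -d

The operator watches the Secret, propagates the new password into MySQL and ProxySQL, and, as the operator log later shows, restarts the proxy pods under a new last-applied-secret hash.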
.+ sleep 5 [wait_cluster_consistency iterations 1-6 elided: the same kubectl_bin status probe repeats every 5 s and keeps returning "initializing"] + echo -n .
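Every kubectl invocation in this trace is wrapped: stdout and stderr are captured to mktemp files and the call is retried up to three times (seq 0 2). A reduced sketch of the wrapper as it can be reconstructed from the xtrace output; the real kubectl_bin lives in the suite's helper library and may differ in detail (for example, in whether it sleeps between retries, which the trace does not show):

  kubectl_bin() {
      local LAST_OUT LAST_ERR exit_status=0
      LAST_OUT=$(mktemp)
      LAST_ERR=$(mktemp)
      for i in $(seq 0 2); do
          set +e
          kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
          exit_status=$?
          set -e
          [ "$exit_status" != 0 ] || break    # success: stop retrying
      done
      cat "$LAST_OUT"
      cat "$LAST_ERR" >&2
      rm "$LAST_OUT" "$LAST_ERR"
      return "$exit_status"
  }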
.+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vTPWO8upLU +++ mktemp ++ local LAST_ERR=/tmp/tmp.NUwBmPQEZy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vTPWO8upLU ++ cat /tmp/tmp.NUwBmPQEZy ++ rm /tmp/tmp.vTPWO8upLU /tmp/tmp.NUwBmPQEZy ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.P2CB6C9In1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.HmKQ2dzzkV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.P2CB6C9In1 ++ cat /tmp/tmp.HmKQ2dzzkV ++ rm /tmp/tmp.P2CB6C9In1 /tmp/tmp.HmKQ2dzzkV ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.qVw8AqBSPs ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.tC99co7jFd +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.qVw8AqBSPs +++++ cat /tmp/tmp.tC99co7jFd +++++ rm /tmp/tmp.qVw8AqBSPs /tmp/tmp.tC99co7jFd +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.U7d08W2YS9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.4QCRuxFOZk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.U7d08W2YS9 ++ cat /tmp/tmp.4QCRuxFOZk ++ rm /tmp/tmp.U7d08W2YS9 /tmp/tmp.4QCRuxFOZk ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-3-57.sql ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.886MXXt4oP +++ mktemp ++ local LAST_ERR=/tmp/tmp.JAN57MVCCl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 
'!=' 0 ']' ++ break ++ cat /tmp/tmp.886MXXt4oP ++ cat /tmp/tmp.JAN57MVCCl ++ rm /tmp/tmp.886MXXt4oP /tmp/tmp.JAN57MVCCl ++ return 0 + client_pod=pxc-client-857d976497-m2vcp + wait_pod pxc-client-857d976497-m2vcp + local pod=pxc-client-857d976497-m2vcp + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-m2vcp ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-m2vcp condition met waiting for pod/pxc-client-857d976497-m2vcp to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.3yLEg1h0KG/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2221/e2e-tests/users/compare/select-3.sql /tmp/tmp.3yLEg1h0KG/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + check_generation 3 haproxy some-name + local generation=3 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vsvbAwIP62 +++ mktemp ++ local LAST_ERR=/tmp/tmp.4oIbDqiKcw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vsvbAwIP62 ++ cat /tmp/tmp.4oIbDqiKcw ++ rm /tmp/tmp.vsvbAwIP62 /tmp/tmp.4oIbDqiKcw ++ return 0 + current_generation=3 + [[ 3 != \3 ]] + destroy users-13708 + local namespace=users-13708 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' + grep -v 'get backup status: Job.batch' + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + tee /tmp/tmp.3yLEg1h0KG/operator.log + sort -u +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.mKfaIlFb7V +++ mktemp ++ local LAST_ERR=/tmp/tmp.uMk0BElrzH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mKfaIlFb7V ++ cat /tmp/tmp.uMk0BElrzH ++ rm /tmp/tmp.mKfaIlFb7V /tmp/tmp.uMk0BElrzH ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-6d956cbb4b-7ftng ++ mktemp + local LAST_OUT=/tmp/tmp.9Z4p0xNHRJ ++ mktemp + local LAST_ERR=/tmp/tmp.8vTqZjh0Eg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-6d956cbb4b-7ftng + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.9Z4p0xNHRJ + cat /tmp/tmp.8vTqZjh0Eg + rm /tmp/tmp.9Z4p0xNHRJ /tmp/tmp.8vTqZjh0Eg + return 0
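check_generation 3 haproxy some-name above asserts that the monitor password rotation bumped the HAProxy StatefulSet's metadata.generation from 2 to 3, i.e. that the operator revised the pod template exactly once. Stripped of the retry wrapper, the assertion reduces to a few lines (a sketch; resource names are taken from this log):

  current=$(kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}')
  if [[ "$current" != "3" ]]; then
      echo "expected generation 3, got ${current}" >&2
      exit 1
  fi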
[garbled go-cmp object diff from the operator's debug output elided after the filtering pipeline above; the recoverable detail is a last-applied-secret hash change, - "0aef25cbe9db3e680efef270bf6bb336b54981dcbf537d4d1c94aafb9f6e5ea" + "16bd4b361cec490fd3468c861f53c9d9a4b57355e23ccaaf9e19b1dc748f18c0", with the remaining fields reported as "... // 16 identical fields"] 2025-10-23T16:41:03.735Z INFO setup Manager starting up {"gitCommit": "842b7a7ef3ccef7bd9060db5084b0e57f9f333fc", "gitBranch": "PR-2221-842b7a7e", "buildTime": "2025-10-23T14:30:37Z", "goVersion": "go1.25.3", "os": "linux", "arch": "amd64"} 2025-10-23T16:41:03.735Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.13-gke.1040000"} 2025-10-23T16:41:03.739Z INFO setup Registering Components. 2025-10-23T16:41:03.912Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-10-23T16:41:03.912Z INFO controller-runtime.metrics Starting metrics server 2025-10-23T16:41:03.912Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-10-23T16:41:03.912Z INFO controller-runtime.webhook Starting webhook server 2025-10-23T16:41:03.912Z INFO setup Starting the Cmd. 2025-10-23T16:41:03.912Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-10-23T16:41:03.913Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-10-23T16:41:03.913Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-10-23T16:41:03.913Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-10-23T16:41:04.014Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com...
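Startup finishes with leader election on the pxc-operator/08db1feb.percona.com Lease; only the replica that wins runs the controllers, which is why the next entries come from a single pod. The current holder can be read straight from the Lease object with standard kubectl (coordination.k8s.io API, nothing operator-specific):

  kubectl -n pxc-operator get lease 08db1feb.percona.com -o 'jsonpath={.spec.holderIdentity}'
  # -> percona-xtradb-cluster-operator-6d956cbb4b-7ftng_f5fc3f0d-6980-48e3-ab8b-0b096f789f18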
2025-10-23T16:41:04.060Z DEBUG events percona-xtradb-cluster-operator-6d956cbb4b-7ftng_f5fc3f0d-6980-48e3-ab8b-0b096f789f18 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"0a3fe980-4b7f-456a-abdc-911e2c392104","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1761237664054703009"}, "reason": "LeaderElection"} 2025-10-23T16:41:04.060Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-10-23T16:41:04.060Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-10-23T16:41:04.060Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.Secret"} 2025-10-23T16:41:04.060Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-10-23T16:41:04.060Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2025-10-23T16:41:04.160Z INFO Starting Controller {"controller": "pxc-controller"} 2025-10-23T16:41:04.160Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2025-10-23T16:41:04.161Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2025-10-23T16:41:04.161Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2025-10-23T16:41:04.161Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2025-10-23T16:41:04.161Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2025-10-23T16:41:44.816Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "e5530dc8-9e1d-487b-ad7c-9d7691c07a2e", "version": "1.19.0"} 2025-10-23T16:41:45.139Z INFO User secrets updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "e5530dc8-9e1d-487b-ad7c-9d7691c07a2e", "secrets": "my-cluster-secrets"} 2025-10-23T16:41:45.380Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "e5530dc8-9e1d-487b-ad7c-9d7691c07a2e", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-10-23T16:41:45.402Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "e5530dc8-9e1d-487b-ad7c-9d7691c07a2e", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-10-23T16:41:46.012Z ERROR Reconciler error {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "e5530dc8-9e1d-487b-ad7c-9d7691c07a2e", "error": "failed to deploy proxysql: updatePod for proxysql: reconcile config: reconcile autotune config: create or update configmap: configmaps \"auto-some-name-pxc\" already exists", "errorVerbose": "configmaps \"auto-some-name-pxc\" already exists\ncreate or update 
configmap\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileAutotuneConfigMap\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:92\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:25\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nreconcile autotune 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:27\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nreconcile 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:54\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nupdatePod for proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:578\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nfailed to deploy 
proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:596\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-10-23T16:41:46.119Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "0bcdb0f2-81c8-4d55-a904-1a67661f0717", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-23T16:41:46.166Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "0bcdb0f2-81c8-4d55-a904-1a67661f0717", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-23T16:41:46.216Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "0bcdb0f2-81c8-4d55-a904-1a67661f0717", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-23T16:41:46.258Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "0bcdb0f2-81c8-4d55-a904-1a67661f0717", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-23T16:41:46.312Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "0bcdb0f2-81c8-4d55-a904-1a67661f0717", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-23T16:41:46.393Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "0bcdb0f2-81c8-4d55-a904-1a67661f0717", "object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-23T16:41:47.339Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "ae4e01cd-d8d0-4b71-9e98-7f8eeb11f7c5", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-10-23T16:41:47.401Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "ae4e01cd-d8d0-4b71-9e98-7f8eeb11f7c5", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-10-23T16:43:04.237Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-13708", "name": 
"some-name", "reconcileID": "c90f936f-59cd-418e-9dce-c56775627759", "user": "operator"} 2025-10-23T16:43:04.276Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c90f936f-59cd-418e-9dce-c56775627759", "user": "monitor"} 2025-10-23T16:43:04.318Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c90f936f-59cd-418e-9dce-c56775627759"} 2025-10-23T16:43:04.357Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c90f936f-59cd-418e-9dce-c56775627759", "user": "xtrabackup"} 2025-10-23T16:43:04.389Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c90f936f-59cd-418e-9dce-c56775627759"} 2025-10-23T16:43:04.397Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c90f936f-59cd-418e-9dce-c56775627759", "err": "get primary pxc pod: not found"} 2025-10-23T16:43:09.195Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "47ba99be-f81b-4456-89df-626f565c51d1", "err": "get primary pxc pod: not found"} 2025-10-23T16:43:14.367Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "82ae3697-0729-4789-83b2-595981cb1fc0", "err": "get primary pxc pod: not found"} 2025-10-23T16:43:19.517Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "b50dd652-bdb3-42a5-9da3-8fad5963515f", "err": "get primary pxc pod: not found"} 2025-10-23T16:45:30.574Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "da485ea6-1324-45c6-a4cb-026fdc026c1f", "user": "root"} 2025-10-23T16:45:30.614Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "da485ea6-1324-45c6-a4cb-026fdc026c1f", "user": "replication"} 2025-10-23T16:45:30.664Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "da485ea6-1324-45c6-a4cb-026fdc026c1f", "new version": "5.7.44-48-57"} 2025-10-23T16:45:32.447Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "da485ea6-1324-45c6-a4cb-026fdc026c1f"} 2025-10-23T16:45:37.350Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2234d845-1618-4264-a602-91c455cb5e61"} 2025-10-23T16:45:42.589Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c781dbb2-2105-4cda-b222-27a5c1e4e1c9"} 2025-10-23T16:45:47.812Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "de3e5020-d09b-4de8-97e4-c4aed8b84c45"} 2025-10-23T16:45:53.142Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "6d30fb17-8843-41e4-82e4-4fc664d377c6"} 
2025-10-23T16:45:58.727Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "fae50d19-9594-4910-bc17-06a7a9c2e38b"} 2025-10-23T16:46:03.616Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "d4d4a796-6010-4fad-917f-2cd98870ad87"} 2025-10-23T16:46:08.734Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "88f0734e-5f29-4379-8b93-a27f5c39210c"} 2025-10-23T16:46:14.621Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "54c04954-04bf-4f43-ac99-edf00f4bac10"} 2025-10-23T16:46:19.853Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "7dacda6c-31a8-485b-876e-b293eaf8c4d6"} 2025-10-23T16:46:25.335Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "27d24487-bb9e-4e21-bc8a-5389b7577b21"} 2025-10-23T16:46:30.543Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "29511b50-aab0-48fc-a9e4-16aeabf1e4d9"} 2025-10-23T16:46:35.844Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "52abd7f1-ec7a-46e5-9b44-71456c4b24e1"} 2025-10-23T16:46:41.118Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "840a9781-f248-4819-a398-a384e7d8a69a"} 2025-10-23T16:46:46.507Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "8efd7160-6468-4f9a-9747-fb7e3ce2fcda"} 2025-10-23T16:46:51.705Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "fe89e0b6-1477-4764-a426-83c6178c19ac"} 2025-10-23T16:46:57.104Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "5634fc45-43f4-49c6-8dbd-586c4c1ae7e7"} 2025-10-23T16:47:02.257Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "9928e761-b789-49f8-b7b6-b549c1efc98f"} 2025-10-23T16:47:02.723Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "ae43c9bc-9868-4026-8aaf-cebf07993d0d", "user": "root"} 2025-10-23T16:47:02.740Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "ae43c9bc-9868-4026-8aaf-cebf07993d0d", "user": "root"} 2025-10-23T16:47:02.769Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "ae43c9bc-9868-4026-8aaf-cebf07993d0d", "secret": "some-name-mysql-init", "user": "root"} 2025-10-23T16:47:05.374Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "ae43c9bc-9868-4026-8aaf-cebf07993d0d"} 2025-10-23T16:47:05.394Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": 
"users-13708", "name": "some-name", "reconcileID": "ae43c9bc-9868-4026-8aaf-cebf07993d0d", "user": "root"} 2025-10-23T16:47:07.221Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "ae43c9bc-9868-4026-8aaf-cebf07993d0d"} 2025-10-23T16:47:12.557Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "ca4c4624-5746-4790-9c73-8a853ad4ce7b"} 2025-10-23T16:47:17.805Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "9dc4b4d2-6d1c-4d55-9032-d3f29205b8a4"} 2025-10-23T16:47:22.154Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "38c2f28a-83c0-4403-8fc5-4963e2f65994", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-23T16:47:22.210Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "38c2f28a-83c0-4403-8fc5-4963e2f65994", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-23T16:47:23.420Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "83c6a716-53de-4009-a8d4-68cffe0ec17a"} 2025-10-23T16:47:47.943Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "33d09147-86b1-455e-ba71-8b17f3adfea3", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-10-23T16:47:47.962Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", 
"reconcileID": "8706f276-2ee2-479a-bd5d-7240ae15f22f", "user": "proxyadmin"} 2025-10-23T16:47:47.962Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "8706f276-2ee2-479a-bd5d-7240ae15f22f", "user": "proxyadmin"} 2025-10-23T16:47:47.992Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "8706f276-2ee2-479a-bd5d-7240ae15f22f", "user": "proxyadmin"} 2025-10-23T16:47:48.014Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "8706f276-2ee2-479a-bd5d-7240ae15f22f", "user": "proxyadmin"} 2025-10-23T16:47:48.014Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "8706f276-2ee2-479a-bd5d-7240ae15f22f", "last-applied-secret": "2a2994bf3940e801a7bb30b4e66675db385fee5e9c35a8cf5d7686fa44d016e2"} 2025-10-23T16:47:48.018Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "8706f276-2ee2-479a-bd5d-7240ae15f22f", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-23T16:47:50.246Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "8706f276-2ee2-479a-bd5d-7240ae15f22f", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-10-23T16:48:37.157Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "efab74ff-f412-4aad-902e-eddd5732f79d"} 2025-10-23T16:48:40.709Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "610e201b-86ee-43e7-9f8c-0e594a893fb3", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-23T16:48:40.768Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "610e201b-86ee-43e7-9f8c-0e594a893fb3", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-23T16:48:43.030Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "933634fe-fe01-4b82-a7da-5003904bd98d", "user": "xtrabackup"} 2025-10-23T16:48:43.043Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "933634fe-fe01-4b82-a7da-5003904bd98d", "user": "xtrabackup"} 2025-10-23T16:48:43.152Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "933634fe-fe01-4b82-a7da-5003904bd98d", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-10-23T16:48:43.219Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "933634fe-fe01-4b82-a7da-5003904bd98d", "user": "xtrabackup"} 2025-10-23T16:48:43.220Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "933634fe-fe01-4b82-a7da-5003904bd98d", "last-applied-secret": "d67d07d70295719d5665a9d2fd25395bb37de30f62d1cb4abd98238c2692e582"} 2025-10-23T16:48:43.223Z DEBUG Updating object {"controller": 
"pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "933634fe-fe01-4b82-a7da-5003904bd98d", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-23T16:48:45.853Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "933634fe-fe01-4b82-a7da-5003904bd98d"} 2025-10-23T16:50:44.082Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f4698880-7c60-42da-9cbc-36cc50336f68", "primary name": "some-name-pxc-0.some-name-pxc.users-13708.svc.cluster.local"} 2025-10-23T16:50:49.236Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "3a8263d9-07f4-45d2-885e-634746759cda", "primary name": "some-name-pxc-0.some-name-pxc.users-13708.svc.cluster.local"} 2025-10-23T16:50:54.383Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f21c1796-9da8-4253-852a-ab960219c2c0", "primary name": "some-name-pxc-0.some-name-pxc.users-13708.svc.cluster.local"} 2025-10-23T16:50:59.591Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "902dc7b6-2df6-4695-9540-5a71c16dc19a", "primary name": "some-name-pxc-0.some-name-pxc.users-13708.svc.cluster.local"} 2025-10-23T16:51:04.772Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f8aa8f62-2877-47fd-886c-adc80951de0e", "primary name": "some-name-pxc-0.some-name-pxc.users-13708.svc.cluster.local"} 2025-10-23T16:51:09.960Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c2242a96-85fd-4428-b8f8-e405a8cbb28a", "primary name": "some-name-pxc-0.some-name-pxc.users-13708.svc.cluster.local"} 2025-10-23T16:51:15.104Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "d8d250e0-829d-4602-a5ac-f270772ad3c4", "primary name": "some-name-pxc-0.some-name-pxc.users-13708.svc.cluster.local"} 2025-10-23T16:51:20.277Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "5afdfa8d-b5f0-49c3-8c12-2a686a537663", "primary name": "some-name-pxc-0.some-name-pxc.users-13708.svc.cluster.local"} 2025-10-23T16:51:28.246Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "9ecb1686-72dd-4bc9-98c8-2b10e01e46ba"} 2025-10-23T16:51:32.930Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "55a552cf-fe12-4024-99a7-072a7f196013"} 2025-10-23T16:51:35.040Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2ed1125d-d332-481e-ba45-271ff5d967fc", "user": "monitor"} 2025-10-23T16:51:35.053Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2ed1125d-d332-481e-ba45-271ff5d967fc", "user": "monitor"} 2025-10-23T16:51:35.082Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2ed1125d-d332-481e-ba45-271ff5d967fc", "secret": "some-name-mysql-init", "user": "monitor"} 2025-10-23T16:51:35.101Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2ed1125d-d332-481e-ba45-271ff5d967fc", "user": "monitor"} 2025-10-23T16:51:35.124Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2ed1125d-d332-481e-ba45-271ff5d967fc", "user": "monitor"} 2025-10-23T16:51:35.124Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2ed1125d-d332-481e-ba45-271ff5d967fc", "last-applied-secret": "7b8cafa30cfd17392bff38868216464d94e5393bdafad2cfff759068c6a873dd"} 2025-10-23T16:51:35.128Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2ed1125d-d332-481e-ba45-271ff5d967fc", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-23T16:51:37.281Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2ed1125d-d332-481e-ba45-271ff5d967fc", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-10-23T16:52:17.440Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "d7be2157-6afc-4e21-b1b0-bbdb757e9ee1"} 2025-10-23T16:52:22.549Z DEBUG PXC users 
synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f333ab8e-01eb-4b3c-9489-9ac12f460987"} 2025-10-23T16:52:34.865Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "d3c4bf2c-95d8-450d-a3a5-3d527cfd826d"} 2025-10-23T16:52:37.166Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "5201c46e-aa7d-41c5-b3bd-9d3190b06ca9", "user": "operator"} 2025-10-23T16:52:37.178Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "5201c46e-aa7d-41c5-b3bd-9d3190b06ca9", "user": "operator"} 2025-10-23T16:52:37.195Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "5201c46e-aa7d-41c5-b3bd-9d3190b06ca9", "secret": "some-name-mysql-init", "user": "operator"} 2025-10-23T16:52:37.217Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "5201c46e-aa7d-41c5-b3bd-9d3190b06ca9", "user": "operator"} 2025-10-23T16:52:37.217Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "5201c46e-aa7d-41c5-b3bd-9d3190b06ca9", "last-applied-secret": "60aef25cbe9db3e680efef270bf6bb336b54981dcbf537d4d1c94aafb9f6e5ea"} 2025-10-23T16:52:37.221Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "5201c46e-aa7d-41c5-b3bd-9d3190b06ca9", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-23T16:52:41.031Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "5201c46e-aa7d-41c5-b3bd-9d3190b06ca9", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' 
(using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' 
(using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-10-23T16:53:08.048Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "e857b033-41f6-4696-bae3-21ace69725d1"} 2025-10-23T16:53:23.813Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c8c52f0a-e481-4346-8e17-54313115a6f9"} 2025-10-23T16:53:24.371Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "secrets": "my-cluster-secrets-2"} 2025-10-23T16:53:24.371Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "user": "root"} 2025-10-23T16:53:24.389Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "user": "root"} 2025-10-23T16:53:24.410Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "secret": "some-name-mysql-init", "user": "root"} 2025-10-23T16:53:26.934Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664"} 2025-10-23T16:53:26.955Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "user": "root"} 2025-10-23T16:53:26.955Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "user": "operator"} 2025-10-23T16:53:26.967Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "user": "operator"} 2025-10-23T16:53:26.988Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "secret": "some-name-mysql-init", "user": "operator"} 2025-10-23T16:53:27.012Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "user": "operator"} 2025-10-23T16:53:27.012Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "user": "monitor"} 2025-10-23T16:53:27.024Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": 
"2c8f5a88-0911-43c0-b9e0-58076ea2f664", "user": "monitor"} 2025-10-23T16:53:27.040Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "secret": "some-name-mysql-init", "user": "monitor"} 2025-10-23T16:53:27.059Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "user": "monitor"} 2025-10-23T16:53:27.080Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "user": "monitor"} 2025-10-23T16:53:27.080Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "user": "xtrabackup"} 2025-10-23T16:53:27.098Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "user": "xtrabackup"} 2025-10-23T16:53:27.117Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-10-23T16:53:27.134Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "user": "xtrabackup"} 2025-10-23T16:53:27.134Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "user": "replication"} 2025-10-23T16:53:27.148Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "user": "replication"} 2025-10-23T16:53:27.168Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "secret": "some-name-mysql-init", "user": "replication"} 2025-10-23T16:53:27.198Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "user": "replication"} 2025-10-23T16:53:27.198Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "user": "proxyadmin"} 2025-10-23T16:53:27.222Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "user": "proxyadmin"} 2025-10-23T16:53:27.269Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "user": "proxyadmin"} 2025-10-23T16:53:27.269Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "last-applied-secret": "933694d940f437cbe59b293df99a692fb4bdfd45d4f6ed429cb88ddf80637121"} 2025-10-23T16:53:27.269Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": 
"users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "last-applied-secret": "933694d940f437cbe59b293df99a692fb4bdfd45d4f6ed429cb88ddf80637121"} 2025-10-23T16:53:27.272Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-23T16:53:27.468Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-23T16:53:30.386Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2c8f5a88-0911-43c0-b9e0-58076ea2f664", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-10-23T16:55:14.222Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "b0cba795-1387-4ed9-9f65-9e686f582dd3", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-13708 on 34.118.224.10:53: no such host"} 2025-10-23T16:55:14.523Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "b6287097-4160-4521-9fa8-f5ffedabf05d", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-13708 on 34.118.224.10:53: no such host"} 2025-10-23T16:55:19.538Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "e6f71509-28a6-4302-acac-46d1e952e445", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-13708 on 34.118.224.10:53: no such host"} 2025-10-23T16:55:24.811Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "d6a3e25c-5ee1-4207-9cb7-dd5d229f197f", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-13708 on 34.118.224.10:53: no such host"} 2025-10-23T16:55:29.965Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c1d36040-ad4f-4719-9df0-0fa36391f33e", "primary name": "some-name-pxc-0.some-name-pxc.users-13708.svc.cluster.local"} 2025-10-23T16:55:35.124Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "3fad035f-1c70-473c-a7d6-d0a99f799eef", "primary name": "some-name-pxc-0.some-name-pxc.users-13708.svc.cluster.local"} 2025-10-23T16:55:40.271Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "641b715b-d9d9-43c8-998c-d869bd996ebc", "primary name": "some-name-pxc-0.some-name-pxc.users-13708.svc.cluster.local"} 2025-10-23T16:55:45.442Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "ec8d4115-457e-4b2c-885f-545f12ac54c0", "primary name": "some-name-pxc-0.some-name-pxc.users-13708.svc.cluster.local"} 2025-10-23T16:55:50.584Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "3d260b5f-c282-423c-8544-08b08c74fc20", "primary name": "some-name-pxc-0.some-name-pxc.users-13708.svc.cluster.local"} 2025-10-23T16:55:55.730Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "1774f5f3-4a35-4d9e-af94-f6977fdb8104", "primary name": "some-name-pxc-0.some-name-pxc.users-13708.svc.cluster.local"} 2025-10-23T16:56:06.032Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "bf856732-4e05-4305-a96b-37734f965771", "primary name": "some-name-pxc-0.some-name-pxc.users-13708.svc.cluster.local"} 2025-10-23T16:56:14.143Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "843207e3-f342-4c6e-b247-2db24ac316ea"} 2025-10-23T16:56:18.735Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "1625d7f5-81a7-40ea-8e22-15a9a35769d6"} 2025-10-23T16:56:20.219Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "9bbea58c-7b57-4398-86d6-45ba4357ebee", "user": "operator"} 2025-10-23T16:56:20.232Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "9bbea58c-7b57-4398-86d6-45ba4357ebee", "user": "operator"} 2025-10-23T16:56:20.252Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "9bbea58c-7b57-4398-86d6-45ba4357ebee", "secret": "some-name-mysql-init", "user": "operator"} 2025-10-23T16:56:20.276Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "9bbea58c-7b57-4398-86d6-45ba4357ebee", "user": "operator"} 2025-10-23T16:56:20.276Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "9bbea58c-7b57-4398-86d6-45ba4357ebee", "last-applied-secret": "bda9d792a1c13f7e840200da71a5545af93075bda646c34b27748c11ea49ff2b"} 2025-10-23T16:56:20.280Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "9bbea58c-7b57-4398-86d6-45ba4357ebee", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-23T16:56:23.863Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", 
"reconcileID": "9bbea58c-7b57-4398-86d6-45ba4357ebee", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' 
(using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13708.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-10-23T16:57:02.634Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f3a1ec34-746c-4e5d-94da-ce37143ea239"} 2025-10-23T16:57:07.220Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "a3bbfd1c-8aef-481a-b7b3-302676d3daa5"} 2025-10-23T16:57:12.468Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "dc86acd6-15b8-4e8d-b3ef-4b8443fe421c"} 2025-10-23T16:57:17.450Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "427d3c10-1ab8-441b-a381-ac58eebdcbb5"} 2025-10-23T16:57:22.666Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "3b002cb3-6e23-42ab-8582-9c6d2ee3e5e2"} 2025-10-23T16:57:28.227Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "5ec56f1d-0332-4ceb-83a0-94e76fcbd3b2"} 2025-10-23T16:57:33.220Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "34f9d10a-2d3a-4734-a2e9-4423f8c08be0"} 2025-10-23T16:57:38.966Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "e8bd1f67-0025-4154-a079-714d1f3c6a02"} 2025-10-23T16:57:44.461Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "27240708-8475-4333-8bfe-e9f19907be78"} 2025-10-23T16:57:49.662Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "50e60348-e913-4d40-aa5e-e25a93092cd9"} 2025-10-23T16:57:54.764Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "bce51a6d-de27-4b2f-8989-7d0d558ec572"} 2025-10-23T16:58:00.428Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "146de3cf-43af-4845-973a-a44fd599e913"} 2025-10-23T16:58:05.774Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "d23770d3-d69e-4885-8dc8-79e1142ea105"} 2025-10-23T16:58:10.832Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "84ee59f0-5c46-4873-b9d8-db74b35aacde"} 
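
Note: the repeated ERROR 1045 entries above are the expected race during a password rotation; syncusers keeps trying the old 'operator' and 'proxyadmin' credentials until every pod has picked up the regenerated secret, after which the "PXC users synced with ProxySQL" stream resumes. A minimal manual probe of the same ProxySQL admin path, assuming the resource names used in this test (internal-some-name, some-name-proxysql-0) and the stock proxysql container name; while the StatefulSet is mid-rollout this fails exactly like the log above:

    # read the current proxyadmin password from the internal secret (key name matches the SecretKeyRef in the pod spec)
    PROXYADMIN_PASS=$(kubectl -n users-13708 get secret internal-some-name -o jsonpath='{.data.proxyadmin}' | base64 -d)
    # hit the ProxySQL admin port (6032, per the error text) the same way syncusers does
    kubectl -n users-13708 exec some-name-proxysql-0 -c proxysql -- \
        mysql -h127.0.0.1 -P6032 -uproxyadmin -p"$PROXYADMIN_PASS" -e 'SELECT 1'
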
2025-10-23T16:58:16.320Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "05cd65cf-7c66-410d-a09e-e13b9a5c58d7"} 2025-10-23T16:58:21.147Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "2e3dc487-3b36-41af-a708-a2fd009c8d25"} 2025-10-23T16:58:22.144Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f8cbab03-a18e-4bf0-a976-3c22e71e35d8", "user": "root"} 2025-10-23T16:58:22.163Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f8cbab03-a18e-4bf0-a976-3c22e71e35d8", "user": "root"} 2025-10-23T16:58:22.183Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f8cbab03-a18e-4bf0-a976-3c22e71e35d8", "secret": "some-name-mysql-init", "user": "root"} 2025-10-23T16:58:24.747Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f8cbab03-a18e-4bf0-a976-3c22e71e35d8"} 2025-10-23T16:58:24.770Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f8cbab03-a18e-4bf0-a976-3c22e71e35d8", "user": "root"} 2025-10-23T16:58:24.770Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f8cbab03-a18e-4bf0-a976-3c22e71e35d8", "user": "monitor"} 2025-10-23T16:58:24.782Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f8cbab03-a18e-4bf0-a976-3c22e71e35d8", "user": "monitor"} 2025-10-23T16:58:24.805Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f8cbab03-a18e-4bf0-a976-3c22e71e35d8", "secret": "some-name-mysql-init", "user": "monitor"} 2025-10-23T16:58:24.823Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f8cbab03-a18e-4bf0-a976-3c22e71e35d8", "user": "monitor"} 2025-10-23T16:58:24.843Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f8cbab03-a18e-4bf0-a976-3c22e71e35d8", "user": "monitor"} 2025-10-23T16:58:24.843Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f8cbab03-a18e-4bf0-a976-3c22e71e35d8", "user": "xtrabackup"} 2025-10-23T16:58:24.854Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f8cbab03-a18e-4bf0-a976-3c22e71e35d8", "user": "xtrabackup"} 2025-10-23T16:58:24.872Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f8cbab03-a18e-4bf0-a976-3c22e71e35d8", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-10-23T16:58:24.895Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f8cbab03-a18e-4bf0-a976-3c22e71e35d8", "user": "xtrabackup"} 2025-10-23T16:58:24.895Z INFO Password changed, updating user {"controller": "pxc-controller", 
"namespace": "users-13708", "name": "some-name", "reconcileID": "f8cbab03-a18e-4bf0-a976-3c22e71e35d8", "user": "proxyadmin"} 2025-10-23T16:58:24.915Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f8cbab03-a18e-4bf0-a976-3c22e71e35d8", "user": "proxyadmin"} 2025-10-23T16:58:24.934Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f8cbab03-a18e-4bf0-a976-3c22e71e35d8", "user": "proxyadmin"} 2025-10-23T16:58:24.934Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f8cbab03-a18e-4bf0-a976-3c22e71e35d8", "last-applied-secret": "16bd4b361cec490fd3468c861f53c9d9a4b57355e23ccaaf9e19b1dc748f18c0"} 2025-10-23T16:58:24.934Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f8cbab03-a18e-4bf0-a976-3c22e71e35d8", "last-applied-secret": "16bd4b361cec490fd3468c861f53c9d9a4b57355e23ccaaf9e19b1dc748f18c0"} 2025-10-23T16:58:24.939Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f8cbab03-a18e-4bf0-a976-3c22e71e35d8", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-23T16:58:25.047Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f8cbab03-a18e-4bf0-a976-3c22e71e35d8", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-23T16:58:27.216Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "f8cbab03-a18e-4bf0-a976-3c22e71e35d8", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-10-23T16:58:45.059Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "51181038-358d-4adf-a545-ea7406c4bcbf", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-23T16:58:45.102Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "51181038-358d-4adf-a545-ea7406c4bcbf", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-23T16:58:45.154Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "51181038-358d-4adf-a545-ea7406c4bcbf", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-23T16:58:45.212Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "51181038-358d-4adf-a545-ea7406c4bcbf", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-23T16:58:45.296Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "51181038-358d-4adf-a545-ea7406c4bcbf", "object": "some-name-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 
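
Note: the "Creating object" entries above mark the point where the test switches the cluster from ProxySQL to HAProxy: the operator creates the some-name-haproxy StatefulSet plus its two Services (some-name-haproxy and some-name-haproxy-replicas). A quick way to watch that rollout converge, using only labels the operator itself sets on these objects (the app.kubernetes.io/* labels visible later in this log); the rollout timeout value is illustrative:

    kubectl -n users-13708 get sts,svc -l app.kubernetes.io/instance=some-name
    kubectl -n users-13708 rollout status sts/some-name-haproxy --timeout=5m
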
2025-10-23T16:58:46.008Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "02556a0e-c628-49c3-9b88-1fd046d36aa2", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-10-23T17:01:46.020Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c761c312-9cd9-4654-b5ab-eb1b7a5a7374", "user": "root"} 2025-10-23T17:01:46.034Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c761c312-9cd9-4654-b5ab-eb1b7a5a7374", "user": "root"} 2025-10-23T17:01:46.054Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c761c312-9cd9-4654-b5ab-eb1b7a5a7374", "secret": "some-name-mysql-init", "user": "root"} 2025-10-23T17:01:46.096Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c761c312-9cd9-4654-b5ab-eb1b7a5a7374", "user": "root"} 2025-10-23T17:01:46.096Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c761c312-9cd9-4654-b5ab-eb1b7a5a7374", "user": "operator"} 2025-10-23T17:01:46.109Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c761c312-9cd9-4654-b5ab-eb1b7a5a7374", "user": "operator"} 2025-10-23T17:01:46.149Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c761c312-9cd9-4654-b5ab-eb1b7a5a7374", "secret": "some-name-mysql-init", "user": "operator"} 2025-10-23T17:01:46.172Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c761c312-9cd9-4654-b5ab-eb1b7a5a7374", "user": "operator"} 2025-10-23T17:01:46.172Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c761c312-9cd9-4654-b5ab-eb1b7a5a7374", "user": "monitor"} 2025-10-23T17:01:46.185Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c761c312-9cd9-4654-b5ab-eb1b7a5a7374", "user": "monitor"} 2025-10-23T17:01:46.213Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c761c312-9cd9-4654-b5ab-eb1b7a5a7374", "secret": "some-name-mysql-init", "user": "monitor"} 2025-10-23T17:01:46.234Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c761c312-9cd9-4654-b5ab-eb1b7a5a7374", "user": "monitor"} 2025-10-23T17:01:46.234Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c761c312-9cd9-4654-b5ab-eb1b7a5a7374", "user": "xtrabackup"} 2025-10-23T17:01:46.246Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c761c312-9cd9-4654-b5ab-eb1b7a5a7374", "user": "xtrabackup"} 2025-10-23T17:01:46.270Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c761c312-9cd9-4654-b5ab-eb1b7a5a7374", 
"secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-10-23T17:01:46.291Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c761c312-9cd9-4654-b5ab-eb1b7a5a7374", "user": "xtrabackup"} 2025-10-23T17:01:46.291Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c761c312-9cd9-4654-b5ab-eb1b7a5a7374", "user": "replication"} 2025-10-23T17:01:46.303Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c761c312-9cd9-4654-b5ab-eb1b7a5a7374", "user": "replication"} 2025-10-23T17:01:46.329Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c761c312-9cd9-4654-b5ab-eb1b7a5a7374", "secret": "some-name-mysql-init", "user": "replication"} 2025-10-23T17:01:46.354Z INFO HAProxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c761c312-9cd9-4654-b5ab-eb1b7a5a7374", "last-applied-secret": "60aef25cbe9db3e680efef270bf6bb336b54981dcbf537d4d1c94aafb9f6e5ea"} 2025-10-23T17:01:46.354Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c761c312-9cd9-4654-b5ab-eb1b7a5a7374", "user": "replication"} 2025-10-23T17:01:46.354Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c761c312-9cd9-4654-b5ab-eb1b7a5a7374", "last-applied-secret": "60aef25cbe9db3e680efef270bf6bb336b54981dcbf537d4d1c94aafb9f6e5ea"} 2025-10-23T17:01:46.357Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c761c312-9cd9-4654-b5ab-eb1b7a5a7374", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-23T17:01:46.447Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c761c312-9cd9-4654-b5ab-eb1b7a5a7374", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-23T17:03:20.283Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "b3b5013b-69f6-48c7-b9ad-53fa189f07af", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-13708 on 34.118.224.10:53: no such host"} 2025-10-23T17:03:20.649Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "a40889ae-43ca-4bab-9369-6f13c71910f9", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-13708 on 34.118.224.10:53: no such host"} 2025-10-23T17:04:13.666Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "671c6e5f-e14d-42bb-a552-cbba64c362da", "user": "monitor"} 2025-10-23T17:04:13.677Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "671c6e5f-e14d-42bb-a552-cbba64c362da", "user": "monitor"} 2025-10-23T17:04:13.698Z INFO MySQL init secret updated {"controller": 
"pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "671c6e5f-e14d-42bb-a552-cbba64c362da", "secret": "some-name-mysql-init", "user": "monitor"} 2025-10-23T17:04:13.720Z INFO HAProxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "671c6e5f-e14d-42bb-a552-cbba64c362da", "last-applied-secret": "4c4bc8fea3631ff3ec650300ad0cd06149b0a8b951467ea62d17ab0987f4711c"} 2025-10-23T17:04:13.720Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "671c6e5f-e14d-42bb-a552-cbba64c362da", "user": "monitor"} 2025-10-23T17:04:13.723Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "671c6e5f-e14d-42bb-a552-cbba64c362da", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-23T17:04:28.228Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "c21be042-b24c-4042-9a98-53372f44ce9c", "err": "get primary pxc pod: failed to get proxy connection: invalid connection"} 2025-10-23T17:04:31.530Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13708", "name": "some-name", "reconcileID": "a02c1fec-4c84-45c3-b15d-21054172e47b", "err": "get primary pxc pod: failed to get proxy connection: invalid connection"} ... // 22 identical fields - "2a2994bf3940e801a7bb30b4e66675db385fee5e9c35a8cf5d7686fa44d016e2", ... // 2 identical fields ... // 2 identical fields ... // 2 identical fields ... // 2 identical fields ... // 3 identical elements ... // 3 identical fields ... // 3 identical fields ... // 3 identical fields + "4c4bc8fea3631ff3ec650300ad0cd06149b0a8b951467ea62d17ab0987f4711c", ... // 4 identical fields ... // 5 identical fields ... // 5 identical fields ... // 5 identical fields "6", - "60aef25cbe9db3e680efef270bf6bb336b54981dcbf537d4d1c94aafb9f6e5ea", + "60aef25cbe9db3e680efef270bf6bb336b54981dcbf537d4d1c94aafb9f6e5ea", ... // 6 identical fields ... // 6 identical fields - "7b8cafa30cfd17392bff38868216464d94e5393bdafad2cfff759068c6a873dd", + "7b8cafa30cfd17392bff38868216464d94e5393bdafad2cfff759068c6a873dd", ... // 7 identical fields ... // 8 identical fields + "933", - "933694d940f437cbe59b293df99a692fb4bdfd45d4f6ed429cb88ddf80637121", + "933694d940f437cbe59b293df99a692fb4bdfd45d4f6ed429cb88ddf80637121", + "94d940f437cbe59b293df99a692fb4bdfd45d4f6ed429cb88ddf80637121", ... // 9 identical fields ... 
--- StatefulSet diffs from the operator log (go-cmp output over &v1.StatefulSet{}; the log capture sorted these lines alphabetically, so the original ordering is lost and only the field-level changes are recoverable) ---
Objects: some-name-haproxy, some-name-proxysql, some-name-pxc in namespace users-13708, compared across multiple reconciles (desired object built by the operator vs. live object from the API server).

Server-populated metadata present only on the live objects:
  - APIVersion: "apps/v1" (also "v1")                          + APIVersion: ""
  - CreationTimestamp: v1.Time{Time: s"2025-10-23 16:41:46 +0000 UTC"} (also 16:58:45)   + CreationTimestamp: v1.Time{}
  - Generation: 1 through 8                                    + Generation: 0
  - ManagedFields: []v1.ManagedFieldsEntry{Manager: "kube-controller-manager" / "percona-xtradb-cluster-operator", Operation: "Update", FieldsType: "FieldsV1", FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`... / s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`..., Subresource: "status"}   + ManagedFields: nil
  - ResourceVersion: 15 values, "1761237926740159016" through "1761238972842959002"      + ResourceVersion: ""
  - UID: "23040a65-3e57-4dad-99fd-bfee331bbaf2" / "2c54a875-30a1-4081-8263-c32b5c4c511b" / "6ace8776-27c4-4ae6-ac10-90a5bb4662f3"   + UID: ""

Live status vs. empty desired status:
  - AvailableReplicas / ReadyReplicas / CurrentReplicas: 2 or 3; UpdatedReplicas: 1, 2 or 3; Replicas: 2 or 3   + all 0
  - CollisionCount: &0                                         + CollisionCount: nil
  - ObservedGeneration: 1 through 8                            + ObservedGeneration: 0
  - CurrentRevision / UpdateRevision: some-name-haproxy-{569b5b4d76, 6f4b558f5c}; some-name-proxysql-{5fbd847ccf, 6c779bbf4d, 6dcb865888, 6fc8b9bf4, 79849b7b79, 7b89b657c8}; some-name-pxc-{5bfdcffdff, 5d9c9b969d (UpdateRevision only), 676c95767c, 6877fd5c7, 7c49c4c58f}   + ""
  - Phase: "Pending" (PVC status)                              + Phase: ""
  - Time: 27 condition/transition timestamps dropped, all 2025-10-23 UTC: 16:41:46, 16:45:26, 16:46:09, 16:47:22, 16:47:42, 16:47:48, 16:48:19, 16:48:40, 16:48:41, 16:48:43, 16:51:20, 16:51:35, 16:52:06, 16:52:37, 16:52:57, 16:53:27, 16:54:09, 16:56:06, 16:56:20, 16:56:41, 16:58:25, 16:58:38, 16:58:45, 16:59:47, 17:01:42, 17:01:46, 17:02:52

API-server defaults set on the live objects but empty in the desired spec:
  - DefaultMode: &420 + nil; - DNSPolicy: "ClusterFirst" + ""; - DeprecatedServiceAccount: "default" + ""; - PeriodSeconds: 10 + 0
  - PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}" + nil
  - PodManagementPolicy: "OrderedReady" + ""; - Protocol: "TCP" + ""; - RestartPolicy: "Always" + ""; - RevisionHistoryLimit: &10 + nil
  - SchedulerName: "default-scheduler" + ""; - SecurityContext: s"&PodSecurityContext{SELinuxOptions:nil,RunAsUser:nil,..." + nil
  - TerminationMessagePath: "/dev/termination-log" + ""; - TerminationMessagePolicy: "File" + ""; - VolumeMode: &"Filesystem" + nil
  - TopologySpreadConstraints: nil + []v1.TopologySpreadConstraint{}

Intended spec changes (the subject of the users test):
  - "last-applied-secret" annotation rotated; plaintext values seen: + "2a2994bf3940e801a7bb30b4e66675db385fee5e9c35a8cf5d7686fa44d016e2", + "60aef25cbe9db3e680efef270bf6bb336b54981dcbf537d4d1c94aafb9f6e5ea", + and - "d67d07d70295719d5665a9d2fd25395bb37de30f62d1cb4abd98238c2692e582", - and + "bda9d792a1c13f7e840200da71a5545af93075bda646c34b27748c11ea49ff2b" (unchanged, re-emitted), "16bd4b361cec490fd3468c861f53c9d9a4b57355e23ccaaf9e19b1dc748f18c0" (context)
  - "percona.com/last-config-hash": base64 of the StatefulSet template JSON ({"replicas":2|3,"selector":{...},"template":{...}}); the -/+ pairs differ only in the last-applied-secret embedded in the encoded annotations; all values are truncated with "..." in the log
  - Value: "mysql_native_password"   + Value: "caching_sha2_password"   (DEFAULT_AUTHENTICATION_PLUGIN)
  - Log-collector sidecars removed from the pxc pod template: containers "logs" and "logrotate" (Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", Args: []string{"logrotate"}, ImagePullPolicy: "Always"), their env ({Name: "IS_LOGCOLLECTOR", Value: "yes"}, {Name: "SERVICE_TYPE", Value: "mysql"}, {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"}, POD_NAMESPASE and POD_NAME fieldRefs, MONITOR_PASSWORD secretKeyRef), EnvFrom: []v1.EnvFromSource{{SecretRef: s"&SecretEnvSource{LocalObjectRefe"...}}, and VolumeMounts: []v1.VolumeMount{{Name: "datadir", MountPath: "/var/lib/mysql"}}

Unchanged go-cmp context lines are elided here: labels/selectors and anti-affinity on kubernetes.io/hostname for all three components, container ports (3306, 3307, 3309, 4444, 4567, 4568, 6032, 6070, 8404, 33060, 33062), env (PXC_SERVICE, MONITOR_HOST, CLUSTER_HASH=3571636, MYSQL_ROOT_PASSWORD/XTRABACKUP_PASSWORD/MONITOR_PASSWORD/OPERATOR_ADMIN_PASSWORD from internal-some-name, liveness/readiness timeouts), volumes (datadir, proxydata, config, tmp, bin, ssl, ssl-internal, auto-config, vault-keyring-secret, mysql-users-secret-file, mysql-init-file, haproxy-auto, haproxy-custom), probes (/var/lib/mysql/liveness-check.sh, /var/lib/mysql/readiness-check.sh), resources (limits cpu 1 / memory 2G, requests cpu 100m), tolerations (node.alpha.kubernetes.io/unreachable, 6000s), UpdateStrategy RollingUpdate{Partition: &0}, ServiceName some-name-haproxy / some-name-proxysql-unready / some-name-pxc, OwnerReferences {APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name", UID: "624c8511-459c-48d6-8d2c-6a00780d83e7"}, annotations percona.com/configuration-hash "d41d8cd98f00b204e9800998ecf8427e", percona.com/ssl-hash "cfd6a52398268173b51d7cdb1331c09a", percona.com/ssl-internal-hash "751e59a1c1d94ac67f9d6bb6e756acdc".

MySQL client errors interleaved with the diff:
[mysql] 2025/10/23 17:03:45 packets.go:58 read tcp 10.209.193.61:34508->34.118.226.34:3306: i/o timeout
[mysql] 2025/10/23 17:04:25 packets.go:58 unexpected EOF
[mysql] 2025/10/23 17:04:26 packets.go:58 unexpected EOF
[mysql] 2025/10/23 17:04:27 packets.go:58 unexpected EOF
[mysql] 2025/10/23 17:04:28 packets.go:58 unexpected EOF
[mysql] 2025/10/23 17:04:29 packets.go:58 unexpected EOF
[mysql] 2025/10/23 17:04:30 packets.go:58 unexpected EOF
[mysql] 2025/10/23 17:04:31 packets.go:58 unexpected EOF

Operator goroutine frames (alphabetized by the capture, so the frame-to-line pairing below the first is lost):
github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1
	/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:869
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1
	/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296, :421, :474

+ kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-13708 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.pyP1OUT3RJ ++ mktemp + local LAST_ERR=/tmp/tmp.oJaN4bMit5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.pyP1OUT3RJ perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-13708 namespace + cat /tmp/tmp.oJaN4bMit5 + rm /tmp/tmp.pyP1OUT3RJ /tmp/tmp.oJaN4bMit5 +
return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.DlFdkWSe3G ++ mktemp + local LAST_ERR=/tmp/tmp.oOJGDb3DW5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.DlFdkWSe3G No resources found + cat /tmp/tmp.oOJGDb3DW5 + rm /tmp/tmp.DlFdkWSe3G /tmp/tmp.oOJGDb3DW5 + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.WXbJQ1R6OG ++ mktemp + local LAST_ERR=/tmp/tmp.m3gnRiATEq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.WXbJQ1R6OG No resources found + cat /tmp/tmp.m3gnRiATEq + rm /tmp/tmp.WXbJQ1R6OG /tmp/tmp.m3gnRiATEq + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.8VsB2ckj0I ++ mktemp + local LAST_ERR=/tmp/tmp.phtX1QznRX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8VsB2ckj0I validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.phtX1QznRX + rm /tmp/tmp.8VsB2ckj0I /tmp/tmp.phtX1QznRX + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-13708 + rm -rf /tmp/tmp.3yLEg1h0KG + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + desc 'test passed' + set +o xtrace + local LAST_OUT=/tmp/tmp.uo4bMdb0Ys ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.ePJKCt3QXb ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.BKUab2Z9yk + local exit_status=0 + local LAST_ERR=/tmp/tmp.w3hGFFklaS + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-13708
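
For reference, most kubectl calls in this trace go through the harness's kubectl_bin retry wrapper: stdout and stderr are buffered to mktemp files, the command is attempted up to three times, and the captured output is replayed on exit. Below is a minimal sketch reconstructed from the xtrace lines above; the real helper lives in the suite's shared functions file, and since the log never shows a failed attempt, the back-off sleep is an assumption rather than confirmed behavior.

#!/bin/bash
set -o errexit

# Retry wrapper matching the pattern in the trace:
# LAST_OUT/LAST_ERR tempfiles, `seq 0 2` attempts, exit-status check, cat + rm.
kubectl_bin() {
	local LAST_OUT LAST_ERR exit_status=0
	LAST_OUT=$(mktemp)
	LAST_ERR=$(mktemp)
	for i in $(seq 0 2); do
		set +e
		kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
		exit_status=$?
		set -e
		if [ "$exit_status" != 0 ]; then
			sleep $((i + 1))   # assumption: back-off between retries is not visible in this log
			continue
		fi
		break
	done
	cat "$LAST_OUT"
	cat "$LAST_ERR" >&2
	rm "$LAST_OUT" "$LAST_ERR"
	return $exit_status
}

# Usage, as in the cleanup above: strip finalizers first so the delete cannot
# hang on a stuck custom resource, then remove every PXC CR in every namespace.
kubectl get pxc --all-namespaces -o wide \
	| grep -v NAMESPACE \
	| xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
kubectl_bin delete pxc --all --all-namespaces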