Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/logs/users-5-7.log Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 + create_infra users-1597 + local ns=users-1597 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-32379 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.PkL1h7PG7q ++ mktemp + local LAST_ERR=/tmp/tmp.SZSg8o9EWR + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PkL1h7PG7q perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-32379 namespace + cat /tmp/tmp.SZSg8o9EWR + rm /tmp/tmp.PkL1h7PG7q /tmp/tmp.SZSg8o9EWR + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.F6NTEqCDyU ++ mktemp + local LAST_ERR=/tmp/tmp.q6vysdAeBk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.F6NTEqCDyU No resources found + cat /tmp/tmp.q6vysdAeBk + rm /tmp/tmp.F6NTEqCDyU /tmp/tmp.q6vysdAeBk + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.k3onS5lgGM ++ mktemp + local LAST_ERR=/tmp/tmp.MyWHatrOWR + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.k3onS5lgGM No resources found + cat /tmp/tmp.MyWHatrOWR + rm /tmp/tmp.k3onS5lgGM /tmp/tmp.MyWHatrOWR + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep validate-auth + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, 
but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp ++ mktemp + grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + local LAST_OUT=/tmp/tmp.wbPkoYXAAM + local LAST_OUT=/tmp/tmp.mmt56nKcAE ++ mktemp + local LAST_ERR=/tmp/tmp.vHCJwBFSri + local exit_status=0 ++ mktemp ++ seq 0 2 + local LAST_ERR=/tmp/tmp.UnXPtbudQw + local exit_status=0 + for i in '$(seq 0 2)' ++ seq 0 2 + set +e + kubectl delete namespace pxc-operator + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mmt56nKcAE + cat /tmp/tmp.UnXPtbudQw + rm /tmp/tmp.mmt56nKcAE /tmp/tmp.UnXPtbudQw + return 0 namespace "users-32379" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.wbPkoYXAAM namespace "pxc-operator" deleted + cat /tmp/tmp.vHCJwBFSri + rm /tmp/tmp.wbPkoYXAAM /tmp/tmp.vHCJwBFSri + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.PTlRptvYI9 ++ mktemp + local LAST_ERR=/tmp/tmp.RkfMioaTxH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PTlRptvYI9 namespace/pxc-operator created + cat /tmp/tmp.RkfMioaTxH + rm /tmp/tmp.PTlRptvYI9 /tmp/tmp.RkfMioaTxH + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.5xhfJIjTDE +++ mktemp ++ local LAST_ERR=/tmp/tmp.RrjhhreY6y ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5xhfJIjTDE ++ cat /tmp/tmp.RrjhhreY6y ++ rm /tmp/tmp.5xhfJIjTDE /tmp/tmp.RrjhhreY6y ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2154-7a623b10-7-cluster3 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.hOePbu4YwR ++ mktemp + local LAST_ERR=/tmp/tmp.rKgEMhVWKG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2154-7a623b10-7-cluster3 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hOePbu4YwR Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2154-7a623b10-7-cluster3" modified. 
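
Every kubectl call above goes through the suite's kubectl_bin wrapper, whose behavior can be read back out of the trace: capture output into mktemp files, retry up to three times, then replay the captured output and clean up. A simplified reconstruction follows; the redirections and the stderr replay are assumptions (xtrace does not print redirections), and the real helper performs an extra status check before sleeping that is omitted here.

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"   # redirections assumed; xtrace omits them
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 ]; then
                sleep 0                               # the trace shows "sleep 0" between failed attempts
            else
                break
            fi
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2                           # assumed: the trace only shows "cat $LAST_ERR"
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }

On total failure the wrapper returns non-zero and callers decide what to do; the "+ return 1" followed by "+ :" seen later in this log is a caller ignoring a NotFound with "|| :".
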
+ cat /tmp/tmp.rKgEMhVWKG + rm /tmp/tmp.hOePbu4YwR /tmp/tmp.rKgEMhVWKG + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.PDZeuChDkn ++ mktemp + local LAST_ERR=/tmp/tmp.wRsU7yRia7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PDZeuChDkn customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.wRsU7yRia7 + rm /tmp/tmp.PDZeuChDkn /tmp/tmp.wRsU7yRia7 + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.aJwaS0QlM6 ++ mktemp + local LAST_ERR=/tmp/tmp.IZZfPi4Brf + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.aJwaS0QlM6 clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.IZZfPi4Brf + rm /tmp/tmp.aJwaS0QlM6 /tmp/tmp.IZZfPi4Brf + return 0 + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2154-7a623b10^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/deploy/cw-operator.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.qqFumTlago ++ mktemp + local LAST_ERR=/tmp/tmp.WMaHC38Uni + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qqFumTlago deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.WMaHC38Uni + rm /tmp/tmp.qqFumTlago /tmp/tmp.WMaHC38Uni + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.LNPMvs6FGE ++ mktemp + local LAST_ERR=/tmp/tmp.9K1PZE5e6j + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LNPMvs6FGE pod/percona-xtradb-cluster-operator-6cf85965f9-dc5tx condition met + cat /tmp/tmp.9K1PZE5e6j + rm /tmp/tmp.LNPMvs6FGE /tmp/tmp.9K1PZE5e6j + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.6Tf07KdLY3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.KNig4cYO4l ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6Tf07KdLY3 ++ cat /tmp/tmp.KNig4cYO4l ++ rm /tmp/tmp.6Tf07KdLY3 /tmp/tmp.KNig4cYO4l ++ return 0 + wait_pod percona-xtradb-cluster-operator-6cf85965f9-dc5tx 480 pxc-operator + local pod=percona-xtradb-cluster-operator-6cf85965f9-dc5tx + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-6cf85965f9-dc5tx ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-6cf85965f9-dc5tx condition met waiting for pod/percona-xtradb-cluster-operator-6cf85965f9-dc5tx to become Ready.Ok + sleep 3 + create_namespace users-1597 + local namespace=users-1597 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + grep -E -v 
'^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + '[' -n '' ']' + desc 'cleaned up old namespaces users-1597' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-1597 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-1597 ++ mktemp + kubectl_bin get ns + xargs kubectl delete ns ++ mktemp + local LAST_OUT=/tmp/tmp.iuO8hZJr6g ++ mktemp + awk '{print$1}' + local LAST_OUT=/tmp/tmp.DMgW37IPSN + local LAST_ERR=/tmp/tmp.KsjTlDWiHa + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns ++ mktemp + local LAST_ERR=/tmp/tmp.322U678orH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-1597 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-1597 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.iuO8hZJr6g + cat /tmp/tmp.KsjTlDWiHa + rm /tmp/tmp.iuO8hZJr6g /tmp/tmp.KsjTlDWiHa + return 0 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-1597 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.DMgW37IPSN + cat /tmp/tmp.322U678orH Error from server (NotFound): namespaces "users-1597" not found + rm /tmp/tmp.DMgW37IPSN /tmp/tmp.322U678orH + return 1 + : + wait_for_delete namespace/users-1597 + local res=namespace/users-1597 + echo -n 'waiting for namespace/users-1597 to be deleted' waiting for namespace/users-1597 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "users-1597" not found + desc 'create namespace users-1597' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-1597 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-1597 ++ mktemp + local LAST_OUT=/tmp/tmp.Do3nIT8BlJ ++ mktemp + local LAST_ERR=/tmp/tmp.VQ9dJhK1zO + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-1597 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Do3nIT8BlJ namespace/users-1597 created + cat /tmp/tmp.VQ9dJhK1zO + rm /tmp/tmp.Do3nIT8BlJ /tmp/tmp.VQ9dJhK1zO + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.ayjnLvTrSv +++ mktemp ++ local LAST_ERR=/tmp/tmp.XxZMV6bmF6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ayjnLvTrSv ++ cat /tmp/tmp.XxZMV6bmF6 ++ rm /tmp/tmp.ayjnLvTrSv /tmp/tmp.XxZMV6bmF6 ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2154-7a623b10-7-cluster3 --namespace=users-1597 ++ mktemp + local LAST_OUT=/tmp/tmp.EbjpVfQeXS ++ mktemp + local LAST_ERR=/tmp/tmp.noR2yPOhnj + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2154-7a623b10-7-cluster3 --namespace=users-1597 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.EbjpVfQeXS Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2154-7a623b10-7-cluster3" modified. 
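
The two create_namespace runs (pxc-operator above, users-1597 just now) follow the same shape, which the trace lets us sketch. The helm uninstall branch is an assumption, since chaos-mesh is never actually installed here; the sketch also shows why the repeated "error: resource(s) were provided, but no name was specified" messages are expected no-ops, not failures.

    destroy_chaos_mesh() {
        local chaos_mesh_ns
        chaos_mesh_ns=$(helm list --all-namespaces --filter chaos-mesh | tail -n1 | awk -F' ' '{print $2}' | sed s/NAMESPACE//)
        if [ -n "$chaos_mesh_ns" ]; then
            helm uninstall chaos-mesh --namespace "$chaos_mesh_ns" || :   # assumed; this branch never fires above
        fi
        # when chaos-mesh is absent, each command substitution below is empty and
        # kubectl delete prints the harmless "no name was specified" error
        timeout 30 kubectl delete MutatingWebhookConfiguration $(kubectl get MutatingWebhookConfiguration | grep chaos-mesh | awk '{print $1}') || :
        timeout 30 kubectl delete ValidatingWebhookConfiguration $(kubectl get ValidatingWebhookConfiguration | grep chaos-mesh | awk '{print $1}') || :
        timeout 30 kubectl delete ValidatingWebhookConfiguration $(kubectl get ValidatingWebhookConfiguration | grep validate-auth | awk '{print $1}') || :
        timeout 30 kubectl delete crd $(kubectl get crd | grep chaos-mesh.org | awk '{print $1}') || :
        timeout 30 kubectl delete clusterrolebinding $(kubectl get clusterrolebinding | grep chaos-mesh | awk '{print $1}') || :
        timeout 30 kubectl delete clusterrole $(kubectl get clusterrole | grep chaos-mesh | awk '{print $1}') || :
    }

    create_namespace() {
        local namespace=$1
        destroy_chaos_mesh
        # drop leftover test namespaces, keeping system and operator ones
        kubectl_bin get ns \
            | grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' \
            | awk '{print $1}' \
            | xargs kubectl delete ns
        kubectl_bin delete namespace "$namespace" || :   # NotFound is fine on a fresh cluster
        wait_for_delete "namespace/$namespace"
        kubectl_bin create namespace "$namespace"
        kubectl_bin config set-context "$(kubectl_bin config current-context)" --namespace="$namespace"
    }
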
+ cat /tmp/tmp.noR2yPOhnj + rm /tmp/tmp.EbjpVfQeXS /tmp/tmp.noR2yPOhnj + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.rSs2saBQ79 ++ mktemp + local LAST_ERR=/tmp/tmp.6opY9ZNgRb + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rSs2saBQ79 secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.6opY9ZNgRb + rm /tmp/tmp.rSs2saBQ79 /tmp/tmp.6opY9ZNgRb + return 0 + desc 'create PXC cluster with 1-password secret' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster with 1-password secret ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/some-name.yml '' '' /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/secrets_one_pass.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/secrets_one_pass.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/secrets_one_pass.yml ++ mktemp + local LAST_OUT=/tmp/tmp.cpV5a3mvUt ++ mktemp + local LAST_ERR=/tmp/tmp.n6IuYNJBDL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/secrets_one_pass.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.cpV5a3mvUt secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.n6IuYNJBDL + rm /tmp/tmp.cpV5a3mvUt /tmp/tmp.n6IuYNJBDL + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/client.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2154-7a623b10#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: 
perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' ++ mktemp + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-1597~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + local LAST_OUT=/tmp/tmp.ZClzl9ecnr + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' ++ mktemp + local LAST_ERR=/tmp/tmp.bTOOnBH8HH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ZClzl9ecnr deployment.apps/pxc-client created + cat /tmp/tmp.bTOOnBH8HH + rm /tmp/tmp.ZClzl9ecnr /tmp/tmp.bTOOnBH8HH + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/some-name.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_OUT=/tmp/tmp.lDpVNAXIEB + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-1597~ + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2154-7a623b10#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + local LAST_ERR=/tmp/tmp.BhVNccsqED + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lDpVNAXIEB perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.BhVNccsqED + rm /tmp/tmp.lDpVNAXIEB /tmp/tmp.BhVNccsqED + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 
'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.0hDmh7S7s3 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.TMOSojhlPH +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.0hDmh7S7s3 +++ cat /tmp/tmp.TMOSojhlPH +++ rm /tmp/tmp.0hDmh7S7s3 /tmp/tmp.TMOSojhlPH +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.UKV7sqm685 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.3jDcjPwAYv +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.UKV7sqm685 +++ cat /tmp/tmp.3jDcjPwAYv +++ rm /tmp/tmp.UKV7sqm685 /tmp/tmp.3jDcjPwAYv +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-1597 ++ mktemp + local LAST_OUT=/tmp/tmp.YAI3Sgd21K ++ mktemp + local LAST_ERR=/tmp/tmp.whnC8MhUMP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-1597 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-1597 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-1597 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.YAI3Sgd21K + cat /tmp/tmp.whnC8MhUMP error: no matching resources found + rm /tmp/tmp.YAI3Sgd21K /tmp/tmp.whnC8MhUMP + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo some-name-proxysql-0 ++ grep -E '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + 
local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.uVzmrelJL4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.LpXjxfT2o4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uVzmrelJL4 ++ cat /tmp/tmp.LpXjxfT2o4 ++ rm /tmp/tmp.uVzmrelJL4 /tmp/tmp.LpXjxfT2o4 ++ return 0 + local root_pass=jBF1JX7ZYV3e+pHH + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' +++ mktemp ++ grep -E -o 'early-plugin-load=keyring_\w+.so' ++ local LAST_OUT=/tmp/tmp.sjVH59MsPX +++ mktemp ++ local LAST_ERR=/tmp/tmp.V9MCwLQwci ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sjVH59MsPX ++ cat /tmp/tmp.V9MCwLQwci Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.sjVH59MsPX /tmp/tmp.V9MCwLQwci ++ return 0 + [[ -n '' ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''jBF1JX7ZYV3e+pHH'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -p'\''jBF1JX7ZYV3e+pHH'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gLBGkf9pLU +++ mktemp ++ local LAST_ERR=/tmp/tmp.Eei3cpteDt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gLBGkf9pLU ++ cat /tmp/tmp.Eei3cpteDt 
++ rm /tmp/tmp.gLBGkf9pLU /tmp/tmp.Eei3cpteDt ++ return 0 + client_pod=pxc-client-857d976497-t7kqm + wait_pod pxc-client-857d976497-t7kqm + local pod=pxc-client-857d976497-t7kqm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-t7kqm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-t7kqm condition met waiting for pod/pxc-client-857d976497-t7kqm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''jBF1JX7ZYV3e+pHH'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -p'\''jBF1JX7ZYV3e+pHH'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kVvP8Tt9wn +++ mktemp ++ local LAST_ERR=/tmp/tmp.VzCiE5tKL7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kVvP8Tt9wn ++ cat /tmp/tmp.VzCiE5tKL7 ++ rm /tmp/tmp.kVvP8Tt9wn /tmp/tmp.VzCiE5tKL7 ++ return 0 + client_pod=pxc-client-857d976497-t7kqm + wait_pod pxc-client-857d976497-t7kqm + local pod=pxc-client-857d976497-t7kqm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-t7kqm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-t7kqm condition met waiting for pod/pxc-client-857d976497-t7kqm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''jBF1JX7ZYV3e+pHH'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''jBF1JX7ZYV3e+pHH'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''jBF1JX7ZYV3e+pHH'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''jBF1JX7ZYV3e+pHH'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yoYJKDMjkD +++ mktemp ++ local LAST_ERR=/tmp/tmp.l7thynjMcU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yoYJKDMjkD ++ cat /tmp/tmp.l7thynjMcU ++ rm /tmp/tmp.yoYJKDMjkD /tmp/tmp.l7thynjMcU ++ return 0 + client_pod=pxc-client-857d976497-t7kqm + wait_pod pxc-client-857d976497-t7kqm + local pod=pxc-client-857d976497-t7kqm + local max_retry=480 + local ns= 
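
The data checks in this part of the log all reduce to two helpers visible in the trace: run_mysql, which execs the mysql CLI inside the pxc-client pod, and compare_mysql_cmd, which diffs the captured output against a canned .sql file, preferring a -57 variant for 5.7 images. A sketch with the assumptions marked inline: $test_dir, $tmp_dir and $IMAGE_PXC are stand-in names, and the exec and redirection lines run with xtrace off in the real suite, so their exact form is inferred.

    get_client_pod() {
        kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
    }

    run_mysql() {
        local command=$1
        local uri=$2
        local client_pod
        client_pod=$(get_client_pod)
        wait_pod "$client_pod"
        set +o xtrace
        # assumed invocation; only the "Defaulted container pxc-client" notices
        # from this exec are visible in the log
        kubectl_bin exec "$client_pod" -- bash -c "printf '%s\n' \"$command\" | mysql -sN $uri"
        set -o xtrace
    }

    compare_mysql_cmd() {
        local command_id=$1 command=$2 uri=$3
        local expected_result=$test_dir/compare/$command_id.sql
        # a 5.7 image switches to a -57 variant of the expected file when one exists
        if [[ $IMAGE_PXC =~ 5\.7 && -f $test_dir/compare/$command_id-57.sql ]]; then
            expected_result=$test_dir/compare/$command_id-57.sql
        fi
        run_mysql "$command" "$uri" >"$tmp_dir/$command_id.sql"   # redirection assumed; $tmp_dir matches /tmp/tmp.ARylqDbRh4
        if [ ! -s "$tmp_dir/$command_id.sql" ]; then
            return 1                                              # assumed handling of an empty capture
        fi
        diff -u "$expected_result" "$tmp_dir/$command_id.sql"
    }
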
++ echo pxc-client-857d976497-t7kqm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-t7kqm condition met waiting for pod/pxc-client-857d976497-t7kqm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.ARylqDbRh4/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1.sql /tmp/tmp.ARylqDbRh4/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''jBF1JX7ZYV3e+pHH'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''jBF1JX7ZYV3e+pHH'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''jBF1JX7ZYV3e+pHH'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''jBF1JX7ZYV3e+pHH'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TojXITL8BM +++ mktemp ++ local LAST_ERR=/tmp/tmp.aCIySZEZGK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TojXITL8BM ++ cat /tmp/tmp.aCIySZEZGK ++ rm /tmp/tmp.TojXITL8BM /tmp/tmp.aCIySZEZGK ++ return 0 + client_pod=pxc-client-857d976497-t7kqm + wait_pod pxc-client-857d976497-t7kqm + local pod=pxc-client-857d976497-t7kqm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-t7kqm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-t7kqm condition met waiting for pod/pxc-client-857d976497-t7kqm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.ARylqDbRh4/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1.sql /tmp/tmp.ARylqDbRh4/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''jBF1JX7ZYV3e+pHH'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''jBF1JX7ZYV3e+pHH'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''jBF1JX7ZYV3e+pHH'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''jBF1JX7ZYV3e+pHH'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.y3nCAJPvHN +++ mktemp ++ local LAST_ERR=/tmp/tmp.G2WOseanxV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.y3nCAJPvHN ++ cat /tmp/tmp.G2WOseanxV ++ rm /tmp/tmp.y3nCAJPvHN /tmp/tmp.G2WOseanxV ++ return 0 + client_pod=pxc-client-857d976497-t7kqm + wait_pod pxc-client-857d976497-t7kqm + local pod=pxc-client-857d976497-t7kqm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-t7kqm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-t7kqm condition met waiting for pod/pxc-client-857d976497-t7kqm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.ARylqDbRh4/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1.sql /tmp/tmp.ARylqDbRh4/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ grep -E -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Td4JjBc35e +++ mktemp ++ local LAST_ERR=/tmp/tmp.P2wcIsotAb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Td4JjBc35e ++ cat /tmp/tmp.P2wcIsotAb Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.Td4JjBc35e /tmp/tmp.P2wcIsotAb ++ return 0 + '[' '' ']' + desc 'test missing passwords were created and present in internal secrets' + set +o xtrace ----------------------------------------------------------------------------------- test missing passwords were created and present in internal secrets ----------------------------------------------------------------------------------- + empty_pwds=() + wrong_pwds=() + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking root' Checking root ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.5dn6VrcHN0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.aEbxEAt1ab ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5dn6VrcHN0 ++ cat /tmp/tmp.aEbxEAt1ab ++ rm /tmp/tmp.5dn6VrcHN0 /tmp/tmp.aEbxEAt1ab ++ return 0 + secret_pass=jBF1JX7ZYV3e+pHH ++ getSecretData internal-some-name root ++ local secretName=internal-some-name ++ local dataKey=root ++ kubectl_bin get secrets/internal-some-name '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.qX3mrsYO8d +++ mktemp ++ local LAST_ERR=/tmp/tmp.JfX9wor6Bw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qX3mrsYO8d ++ cat /tmp/tmp.JfX9wor6Bw ++ rm /tmp/tmp.qX3mrsYO8d /tmp/tmp.JfX9wor6Bw ++ return 0 + int_secret_pass=jBF1JX7ZYV3e+pHH + [[ -z jBF1JX7ZYV3e+pHH ]] + [[ jBF1JX7ZYV3e+pHH != \j\B\F\1\J\X\7\Z\Y\V\3\e\+\p\H\H ]] + [[ root != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ root ]] + [[ '' =~ root ]] + echo 'Running compare for root' Running compare for root + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''jBF1JX7ZYV3e+pHH'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''jBF1JX7ZYV3e+pHH'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql 
-uroot -p'\''jBF1JX7ZYV3e+pHH'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''jBF1JX7ZYV3e+pHH'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.y2Jmi4eFXd +++ mktemp ++ local LAST_ERR=/tmp/tmp.T1DorvgYzH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.y2Jmi4eFXd ++ cat /tmp/tmp.T1DorvgYzH ++ rm /tmp/tmp.y2Jmi4eFXd /tmp/tmp.T1DorvgYzH ++ return 0 + client_pod=pxc-client-857d976497-t7kqm + wait_pod pxc-client-857d976497-t7kqm + local pod=pxc-client-857d976497-t7kqm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-t7kqm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-t7kqm condition met waiting for pod/pxc-client-857d976497-t7kqm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.ARylqDbRh4/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql /tmp/tmp.ARylqDbRh4/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking xtrabackup' Checking xtrabackup ++ getSecretData my-cluster-secrets xtrabackup ++ local secretName=my-cluster-secrets ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.z8W5g6lOpM +++ mktemp ++ local LAST_ERR=/tmp/tmp.7HjKV0cNTY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.z8W5g6lOpM ++ cat /tmp/tmp.7HjKV0cNTY ++ rm /tmp/tmp.z8W5g6lOpM /tmp/tmp.7HjKV0cNTY ++ return 0 + secret_pass='gK^Fj80Bgr.{kY*>' ++ getSecretData internal-some-name xtrabackup ++ local secretName=internal-some-name ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.5z7TLfUZJQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.1AEqqDm83z ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5z7TLfUZJQ ++ cat /tmp/tmp.1AEqqDm83z ++ rm /tmp/tmp.5z7TLfUZJQ /tmp/tmp.1AEqqDm83z ++ return 0 + int_secret_pass='gK^Fj80Bgr.{kY*>' + [[ -z gK^Fj80Bgr.{kY*> ]] + [[ gK^Fj80Bgr.{kY*> != \g\K\^\F\j\8\0\B\g\r\.\{\k\Y\*\> ]] + [[ xtrabackup != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ xtrabackup ]] + [[ '' =~ xtrabackup ]] + echo 'Running compare for xtrabackup' Running compare for xtrabackup + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''gK^Fj80Bgr.{kY*>'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''gK^Fj80Bgr.{kY*>'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''gK^Fj80Bgr.{kY*>'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''gK^Fj80Bgr.{kY*>'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6p8olsa3sl +++ mktemp ++ local LAST_ERR=/tmp/tmp.gF2IxZytdB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6p8olsa3sl ++ cat /tmp/tmp.gF2IxZytdB ++ rm /tmp/tmp.6p8olsa3sl /tmp/tmp.gF2IxZytdB ++ return 0 + client_pod=pxc-client-857d976497-t7kqm + wait_pod pxc-client-857d976497-t7kqm + local pod=pxc-client-857d976497-t7kqm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-t7kqm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-t7kqm condition met waiting for pod/pxc-client-857d976497-t7kqm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.ARylqDbRh4/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql /tmp/tmp.ARylqDbRh4/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking monitor' Checking monitor ++ getSecretData my-cluster-secrets monitor ++ local secretName=my-cluster-secrets ++ local dataKey=monitor ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.mHyFz0vFdK +++ mktemp ++ local LAST_ERR=/tmp/tmp.dwpmATA8Dv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mHyFz0vFdK ++ cat /tmp/tmp.dwpmATA8Dv ++ rm /tmp/tmp.mHyFz0vFdK /tmp/tmp.dwpmATA8Dv ++ return 0 + secret_pass=monitor_password ++ getSecretData internal-some-name monitor ++ local secretName=internal-some-name ++ local dataKey=monitor ++ kubectl_bin get secrets/internal-some-name '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.fBlzBmFeBK +++ mktemp ++ local LAST_ERR=/tmp/tmp.UlSrqXEMao ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fBlzBmFeBK ++ cat /tmp/tmp.UlSrqXEMao ++ rm /tmp/tmp.fBlzBmFeBK /tmp/tmp.UlSrqXEMao ++ return 0 + int_secret_pass=monitor_password + [[ -z monitor_password ]] + [[ monitor_password != \m\o\n\i\t\o\r\_\p\a\s\s\w\o\r\d ]] + [[ monitor != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ monitor ]] + [[ '' =~ monitor ]] + echo 'Running compare for monitor' Running compare for monitor + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local postfix= + local 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hhoNM73sZZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.tpXvt5rHbT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hhoNM73sZZ ++ cat /tmp/tmp.tpXvt5rHbT ++ rm /tmp/tmp.hhoNM73sZZ /tmp/tmp.tpXvt5rHbT ++ return 0 + client_pod=pxc-client-857d976497-t7kqm + wait_pod pxc-client-857d976497-t7kqm + local pod=pxc-client-857d976497-t7kqm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-t7kqm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-t7kqm condition met waiting for pod/pxc-client-857d976497-t7kqm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.ARylqDbRh4/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql /tmp/tmp.ARylqDbRh4/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking proxyadmin' Checking proxyadmin ++ getSecretData my-cluster-secrets proxyadmin ++ local secretName=my-cluster-secrets ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.43UzvJMhI8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.VY4pdqxnJ3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.43UzvJMhI8 ++ cat /tmp/tmp.VY4pdqxnJ3 ++ rm /tmp/tmp.43UzvJMhI8 /tmp/tmp.VY4pdqxnJ3 ++ return 0 + secret_pass='h)W}#?oGUij!fQ$g8' ++ getSecretData internal-some-name proxyadmin ++ local secretName=internal-some-name ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.G56FDpNTwY +++ mktemp ++ local LAST_ERR=/tmp/tmp.dBud6R6Mhy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.G56FDpNTwY ++ cat /tmp/tmp.dBud6R6Mhy ++ rm /tmp/tmp.G56FDpNTwY /tmp/tmp.dBud6R6Mhy ++ return 0 + int_secret_pass='h)W}#?oGUij!fQ$g8' + [[ -z h)W}#?oGUij!fQ$g8 ]] + [[ h)W}#?oGUij!fQ$g8 != \h\)\W\}\#\?\o\G\U\i\j\!\f\Q\$\g\8 ]] + [[ proxyadmin != \p\r\o\x\y\a\d\m\i\n ]] + [[ proxyadmin == \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ proxyadmin ]] + [[ '' =~ proxyadmin ]] + echo 'Running compare for proxyadmin' Running compare for proxyadmin + 
compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''h)W}#?oGUij!fQ$g8'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''h)W}#?oGUij!fQ$g8'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''h)W}#?oGUij!fQ$g8'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''h)W}#?oGUij!fQ$g8'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.ARylqDbRh4/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2.sql /tmp/tmp.ARylqDbRh4/select-2.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking operator' Checking operator ++ getSecretData my-cluster-secrets operator ++ local secretName=my-cluster-secrets ++ local dataKey=operator ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.Y6SgeACsXU +++ mktemp ++ local LAST_ERR=/tmp/tmp.kYn4LIMHLV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Y6SgeACsXU ++ cat /tmp/tmp.kYn4LIMHLV ++ rm /tmp/tmp.Y6SgeACsXU /tmp/tmp.kYn4LIMHLV ++ return 0 + secret_pass=']WiGx1=6RlMv5)5{rT' ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.yhyzEnd1Be +++ mktemp ++ local LAST_ERR=/tmp/tmp.siFeKOdZ9X ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yhyzEnd1Be ++ cat /tmp/tmp.siFeKOdZ9X ++ rm /tmp/tmp.yhyzEnd1Be /tmp/tmp.siFeKOdZ9X ++ return 0 + int_secret_pass=']WiGx1=6RlMv5)5{rT' + [[ -z ]WiGx1=6RlMv5)5{rT ]] + [[ ]WiGx1=6RlMv5)5{rT != \]\W\i\G\x\1\=\6\R\l\M\v\5\)\5\{\r\T ]] + [[ operator != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ operator ]] + [[ '' =~ operator ]] + echo 'Running compare for operator' Running compare for operator + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\'']WiGx1=6RlMv5)5{rT'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\'']WiGx1=6RlMv5)5{rT'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\'']WiGx1=6RlMv5)5{rT'\''' + 
local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\'']WiGx1=6RlMv5)5{rT'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zDpDONi2XG +++ mktemp ++ local LAST_ERR=/tmp/tmp.aR9z2XhDow ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zDpDONi2XG ++ cat /tmp/tmp.aR9z2XhDow ++ rm /tmp/tmp.zDpDONi2XG /tmp/tmp.aR9z2XhDow ++ return 0 + client_pod=pxc-client-857d976497-t7kqm + wait_pod pxc-client-857d976497-t7kqm + local pod=pxc-client-857d976497-t7kqm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-t7kqm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-t7kqm condition met waiting for pod/pxc-client-857d976497-t7kqm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.ARylqDbRh4/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql /tmp/tmp.ARylqDbRh4/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking replication' Checking replication ++ getSecretData my-cluster-secrets replication ++ local secretName=my-cluster-secrets ++ local dataKey=replication ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.zi5iS6Y3M0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.mboEgO65Bg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zi5iS6Y3M0 ++ cat /tmp/tmp.mboEgO65Bg ++ rm /tmp/tmp.zi5iS6Y3M0 /tmp/tmp.mboEgO65Bg ++ return 0 + secret_pass='9Nk&2xXPEK]QM^uD$' ++ getSecretData internal-some-name replication ++ local secretName=internal-some-name ++ local dataKey=replication ++ kubectl_bin get secrets/internal-some-name '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.2J3qnhjb3N +++ mktemp ++ local LAST_ERR=/tmp/tmp.4dF3EQsjAI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2J3qnhjb3N ++ cat /tmp/tmp.4dF3EQsjAI ++ rm /tmp/tmp.2J3qnhjb3N /tmp/tmp.4dF3EQsjAI ++ return 0 + int_secret_pass='9Nk&2xXPEK]QM^uD$' + [[ -z 9Nk&2xXPEK]QM^uD$ ]] + [[ 9Nk&2xXPEK]QM^uD$ != \9\N\k\&\2\x\X\P\E\K\]\Q\M\^\u\D\$ ]] + [[ replication != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ replication ]] + [[ '' =~ replication ]] + echo 'Running compare for replication' Running compare for replication + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''9Nk&2xXPEK]QM^uD$'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''9Nk&2xXPEK]QM^uD$'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''9Nk&2xXPEK]QM^uD$'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''9Nk&2xXPEK]QM^uD$'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.L9WHN9T6zr +++ mktemp ++ local LAST_ERR=/tmp/tmp.i9KQlQYpjJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.L9WHN9T6zr ++ cat /tmp/tmp.i9KQlQYpjJ ++ rm /tmp/tmp.L9WHN9T6zr /tmp/tmp.i9KQlQYpjJ ++ return 0 + client_pod=pxc-client-857d976497-t7kqm + wait_pod pxc-client-857d976497-t7kqm + local pod=pxc-client-857d976497-t7kqm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-t7kqm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-t7kqm condition met waiting for pod/pxc-client-857d976497-t7kqm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.ARylqDbRh4/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql /tmp/tmp.ARylqDbRh4/select-4.sql + [[ -n '' ]] + [[ -n '' ]] + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.jA497eEprU ++ mktemp + local LAST_ERR=/tmp/tmp.FL55nUokD9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jA497eEprU secret/my-cluster-secrets patched + cat /tmp/tmp.FL55nUokD9 + rm /tmp/tmp.jA497eEprU /tmp/tmp.FL55nUokD9 + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6Od93Beqm1 +++ mktemp ++ local 
LAST_ERR=/tmp/tmp.8zvBrut5l7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6Od93Beqm1 ++ cat /tmp/tmp.8zvBrut5l7 ++ rm /tmp/tmp.6Od93Beqm1 /tmp/tmp.8zvBrut5l7 ++ return 0 + client_pod=pxc-client-857d976497-t7kqm + wait_pod pxc-client-857d976497-t7kqm + local pod=pxc-client-857d976497-t7kqm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-t7kqm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-t7kqm condition met waiting for pod/pxc-client-857d976497-t7kqm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.ARylqDbRh4/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql /tmp/tmp.ARylqDbRh4/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.lA3HJ1tU1B ++ mktemp + local LAST_ERR=/tmp/tmp.ceM2WDp2Ol + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lA3HJ1tU1B perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.ceM2WDp2Ol + rm /tmp/tmp.lA3HJ1tU1B /tmp/tmp.ceM2WDp2Ol + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.y3FRMtoRk9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.V4s5vkzIvb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.y3FRMtoRk9 ++ cat /tmp/tmp.V4s5vkzIvb ++ rm /tmp/tmp.y3FRMtoRk9 /tmp/tmp.V4s5vkzIvb ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Zq8Ki6lj6e +++ mktemp ++ local LAST_ERR=/tmp/tmp.DbKwL1S0JH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Zq8Ki6lj6e ++ cat /tmp/tmp.DbKwL1S0JH ++ rm /tmp/tmp.Zq8Ki6lj6e /tmp/tmp.DbKwL1S0JH ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.zFL1qTErMj ++++++ mktemp +++++ local 
LAST_ERR=/tmp/tmp.gabOLIKuiB +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.zFL1qTErMj +++++ cat /tmp/tmp.gabOLIKuiB +++++ rm /tmp/tmp.zFL1qTErMj /tmp/tmp.gabOLIKuiB +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ElJ4LdSz25 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.AxaEfXJbrM +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ElJ4LdSz25 +++++ cat /tmp/tmp.AxaEfXJbrM +++++ rm /tmp/tmp.ElJ4LdSz25 /tmp/tmp.AxaEfXJbrM +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ukJAbYUHgE +++ mktemp ++ local LAST_ERR=/tmp/tmp.VezSmA5uGL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ukJAbYUHgE ++ cat /tmp/tmp.VezSmA5uGL ++ rm /tmp/tmp.ukJAbYUHgE /tmp/tmp.VezSmA5uGL ++ return 0 + [[ 3 == \3 ]] + echo + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.gbVs5fzoAF ++ mktemp + local LAST_ERR=/tmp/tmp.dgzJQzQNNd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.gbVs5fzoAF secret/my-cluster-secrets patched + cat /tmp/tmp.dgzJQzQNNd + rm /tmp/tmp.gbVs5fzoAF /tmp/tmp.dgzJQzQNNd + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cgdorJ2IaB +++ mktemp ++ local LAST_ERR=/tmp/tmp.icH9r8zik6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cgdorJ2IaB ++ cat /tmp/tmp.icH9r8zik6 ++ rm /tmp/tmp.cgdorJ2IaB /tmp/tmp.icH9r8zik6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
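The proxyadmin rotation traced above boils down to two moves: write the new base64-encoded value into the cluster Secret, then verify the credential against the ProxySQL admin interface on port 6032. A minimal sketch of that pattern, assuming the run_mysql_local helper wraps kubectl exec (only the kubectl patch and mysql flags are taken from the trace; the exec wrapper shape is an assumption):

new_pass='test-password'   # dGVzdC1wYXNzd29yZA== decoded
# rotate the password by patching the user's key in the Secret
kubectl patch secret my-cluster-secrets \
  -p="{\"data\":{\"proxyadmin\": \"$(echo -n "$new_pass" | base64)\"}}"
# check the new credential against the ProxySQL admin interface (port 6032)
kubectl exec some-name-proxysql-0 -c proxysql -- \
  mysql -h127.0.0.1 -P6032 -uproxyadmin -p"$new_pass" -e 'SHOW TABLES;'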
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RMrLqekKT3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.f0ZGJL5R4X ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RMrLqekKT3 ++ cat /tmp/tmp.f0ZGJL5R4X ++ rm /tmp/tmp.RMrLqekKT3 /tmp/tmp.f0ZGJL5R4X ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8KHUR4EtMx +++ mktemp ++ local LAST_ERR=/tmp/tmp.xUVXTMPgOl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8KHUR4EtMx ++ cat /tmp/tmp.xUVXTMPgOl ++ rm /tmp/tmp.8KHUR4EtMx /tmp/tmp.xUVXTMPgOl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wJAnaX7iym +++ mktemp ++ local LAST_ERR=/tmp/tmp.yM9WDMFrdy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wJAnaX7iym ++ cat /tmp/tmp.yM9WDMFrdy ++ rm /tmp/tmp.wJAnaX7iym /tmp/tmp.yM9WDMFrdy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9XBjUpJWPt +++ mktemp ++ local LAST_ERR=/tmp/tmp.9RwfLsdpbb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9XBjUpJWPt ++ cat /tmp/tmp.9RwfLsdpbb ++ rm /tmp/tmp.9XBjUpJWPt /tmp/tmp.9RwfLsdpbb ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sIGcOaIYZV +++ mktemp ++ local LAST_ERR=/tmp/tmp.quFZ4TSDfn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sIGcOaIYZV ++ cat /tmp/tmp.quFZ4TSDfn ++ rm /tmp/tmp.sIGcOaIYZV /tmp/tmp.quFZ4TSDfn ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.qdfvMG4OLF ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.mSt8Dv4Qmw +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.qdfvMG4OLF +++++ cat /tmp/tmp.mSt8Dv4Qmw +++++ rm /tmp/tmp.qdfvMG4OLF /tmp/tmp.mSt8Dv4Qmw +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.NAWwTYjROb ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.UrvXKAMf63 +++++ local exit_status=0 ++++++ seq 0 2 +++++ 
for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.NAWwTYjROb +++++ cat /tmp/tmp.UrvXKAMf63 +++++ rm /tmp/tmp.NAWwTYjROb /tmp/tmp.UrvXKAMf63 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mqzSgjv80S +++ mktemp ++ local LAST_ERR=/tmp/tmp.Bck6hlv6QA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mqzSgjv80S ++ cat /tmp/tmp.Bck6hlv6QA ++ rm /tmp/tmp.mqzSgjv80S /tmp/tmp.Bck6hlv6QA ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.ARylqDbRh4/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2.sql /tmp/tmp.ARylqDbRh4/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.ARylqDbRh4/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2.sql /tmp/tmp.ARylqDbRh4/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.ARylqDbRh4/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2.sql /tmp/tmp.ARylqDbRh4/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.SepJ8qfuP6 ++ mktemp + local LAST_ERR=/tmp/tmp.yN9lAC7VLE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SepJ8qfuP6 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.yN9lAC7VLE + rm /tmp/tmp.SepJ8qfuP6 /tmp/tmp.yN9lAC7VLE + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ojCEcXj6KT ++ mktemp + local LAST_ERR=/tmp/tmp.4Wc2ltmSHs + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ojCEcXj6KT secret/my-cluster-secrets patched + cat /tmp/tmp.4Wc2ltmSHs + rm /tmp/tmp.ojCEcXj6KT /tmp/tmp.4Wc2ltmSHs + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WgQgTOUB00 +++ mktemp ++ local LAST_ERR=/tmp/tmp.RsngSYDZmX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set 
-e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WgQgTOUB00 ++ cat /tmp/tmp.RsngSYDZmX ++ rm /tmp/tmp.WgQgTOUB00 /tmp/tmp.RsngSYDZmX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.drPLNmVfga +++ mktemp ++ local LAST_ERR=/tmp/tmp.zQtgnqayfz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.drPLNmVfga ++ cat /tmp/tmp.zQtgnqayfz ++ rm /tmp/tmp.drPLNmVfga /tmp/tmp.zQtgnqayfz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gtFuFCkhG2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.w8HeuhLrsJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gtFuFCkhG2 ++ cat /tmp/tmp.w8HeuhLrsJ ++ rm /tmp/tmp.gtFuFCkhG2 /tmp/tmp.w8HeuhLrsJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.z2W3ELIAn0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.PyHAZIliOO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.z2W3ELIAn0 ++ cat /tmp/tmp.PyHAZIliOO ++ rm /tmp/tmp.z2W3ELIAn0 /tmp/tmp.PyHAZIliOO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Y9rrV1aVsW +++ mktemp ++ local LAST_ERR=/tmp/tmp.VQuRqJef9o ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Y9rrV1aVsW ++ cat /tmp/tmp.VQuRqJef9o ++ rm /tmp/tmp.Y9rrV1aVsW /tmp/tmp.VQuRqJef9o ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rOdJuc1UY6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.VRVNqYwucu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rOdJuc1UY6 ++ cat /tmp/tmp.VRVNqYwucu ++ rm /tmp/tmp.rOdJuc1UY6 /tmp/tmp.VRVNqYwucu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IdS4It6ix9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.rrC2bupDGF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IdS4It6ix9 ++ cat /tmp/tmp.rrC2bupDGF ++ rm /tmp/tmp.IdS4It6ix9 /tmp/tmp.rrC2bupDGF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
[ ... attempts 6-22 elided: 17 further identical 5-second polls of kubectl get pxc some-name -o 'jsonpath={.status.state}', each still returning "initializing" ... ] + echo -n .
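Every elided attempt above follows the same 5-second poll of the custom resource status; distilled into a standalone loop it looks roughly like this (resource names and jsonpath are taken from the trace, the loop shape itself is an assumption about what wait_cluster_consistency does internally):

i=0; max=300
until [[ "$(kubectl get pxc some-name -o jsonpath='{.status.state}')" == "ready" ]]; do
  (( i >= max )) && { echo "pxc/some-name never became ready" >&2; exit 1; }
  echo -n .          # progress dot, matching the trace output
  sleep 5
  (( i += 1 ))
done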
.+ sleep 5 + [[ 23 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4Cv3vHuFJx +++ mktemp ++ local LAST_ERR=/tmp/tmp.3ImSXgsxF2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4Cv3vHuFJx ++ cat /tmp/tmp.3ImSXgsxF2 ++ rm /tmp/tmp.4Cv3vHuFJx /tmp/tmp.3ImSXgsxF2 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gRPZt1DdmY +++ mktemp ++ local LAST_ERR=/tmp/tmp.huLmsJXtaB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gRPZt1DdmY ++ cat /tmp/tmp.huLmsJXtaB ++ rm /tmp/tmp.gRPZt1DdmY /tmp/tmp.huLmsJXtaB ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.kluQ0ZJCij ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.cC6zeAcTmo +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.kluQ0ZJCij +++++ cat /tmp/tmp.cC6zeAcTmo +++++ rm /tmp/tmp.kluQ0ZJCij /tmp/tmp.cC6zeAcTmo +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.r8iVsQ8DDg ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.bWrn3WXn66 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.r8iVsQ8DDg +++++ cat /tmp/tmp.bWrn3WXn66 +++++ rm /tmp/tmp.r8iVsQ8DDg /tmp/tmp.bWrn3WXn66 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w0KJEj85IQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.3GvoZ3Ne0i ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.w0KJEj85IQ ++ cat /tmp/tmp.3GvoZ3Ne0i ++ rm /tmp/tmp.w0KJEj85IQ /tmp/tmp.3GvoZ3Ne0i ++ return 0 + [[ 2 == \2 ]] + echo + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 
'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' -s /tmp/tmp.ARylqDbRh4/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-3.sql /tmp/tmp.ARylqDbRh4/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Qiz0RmkHKR ++ mktemp + local LAST_ERR=/tmp/tmp.J8lubggzVd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Qiz0RmkHKR secret/my-cluster-secrets patched + cat /tmp/tmp.J8lubggzVd + rm /tmp/tmp.Qiz0RmkHKR /tmp/tmp.J8lubggzVd + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.QSTzZrYt5D +++ mktemp ++ local LAST_ERR=/tmp/tmp.4G8L5ynA7f ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QSTzZrYt5D ++ cat /tmp/tmp.4G8L5ynA7f ++ rm /tmp/tmp.QSTzZrYt5D /tmp/tmp.4G8L5ynA7f ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + echo 'Skipping dual password feature doesn'\''t work for 5.7. PXC 5.7 doesn'\''t support it!' Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it! + return + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gzbuWDkbMU +++ mktemp ++ local LAST_ERR=/tmp/tmp.ud5fnVaq1S ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gzbuWDkbMU ++ cat /tmp/tmp.ud5fnVaq1S ++ rm /tmp/tmp.gzbuWDkbMU /tmp/tmp.ud5fnVaq1S ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
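The monitor step first reads the current root password back out of the Secret (the dual-password propagation wait is skipped here because, as the trace itself notes, PXC 5.7 does not support that feature). The getSecretData helper seen throughout this log appears to reduce to a template lookup plus a decode; a sketch:

# fetch one key of a Secret and decode it (template path copied from the trace)
root_pass=$(kubectl get secret my-cluster-secrets \
  --template='{{.data.root}}' | base64 --decode)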
[ ... attempts 0-5 elided: six identical 5-second polls of kubectl get pxc some-name -o 'jsonpath={.status.state}', each still returning "initializing" ... ] + echo -n .
.+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.djNmN6JExq +++ mktemp ++ local LAST_ERR=/tmp/tmp.CK1uEn7lRB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.djNmN6JExq ++ cat /tmp/tmp.CK1uEn7lRB ++ rm /tmp/tmp.djNmN6JExq /tmp/tmp.CK1uEn7lRB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RmNvNxsrkh +++ mktemp ++ local LAST_ERR=/tmp/tmp.O8CoS2evv0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RmNvNxsrkh ++ cat /tmp/tmp.O8CoS2evv0 ++ rm /tmp/tmp.RmNvNxsrkh /tmp/tmp.O8CoS2evv0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ut4Ne7HswZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZCxLqQ0fP9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ut4Ne7HswZ ++ cat /tmp/tmp.ZCxLqQ0fP9 ++ rm /tmp/tmp.ut4Ne7HswZ /tmp/tmp.ZCxLqQ0fP9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lUGG4wdDYv +++ mktemp ++ local LAST_ERR=/tmp/tmp.Yguq1mVKB0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lUGG4wdDYv ++ cat /tmp/tmp.Yguq1mVKB0 ++ rm /tmp/tmp.lUGG4wdDYv /tmp/tmp.Yguq1mVKB0 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OmHYv52G07 +++ mktemp ++ local LAST_ERR=/tmp/tmp.2WGKoU3C4O ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OmHYv52G07 ++ cat /tmp/tmp.2WGKoU3C4O ++ rm /tmp/tmp.OmHYv52G07 /tmp/tmp.2WGKoU3C4O ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.VjXabTy5T9 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.g8xpebZiC2 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.VjXabTy5T9 +++++ cat /tmp/tmp.g8xpebZiC2 +++++ rm /tmp/tmp.VjXabTy5T9 /tmp/tmp.g8xpebZiC2 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.xDy19CWmUl ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.jvSkv7ZEkn +++++ local exit_status=0 ++++++ seq 0 2 +++++ 
for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.xDy19CWmUl +++++ cat /tmp/tmp.jvSkv7ZEkn +++++ rm /tmp/tmp.xDy19CWmUl /tmp/tmp.jvSkv7ZEkn +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XgE4TzB2j3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.xb92x6iY3J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XgE4TzB2j3 ++ cat /tmp/tmp.xb92x6iY3J ++ rm /tmp/tmp.XgE4TzB2j3 /tmp/tmp.xb92x6iY3J ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Y6LLoVCv95 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZnzQjsG8h8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Y6LLoVCv95 ++ cat /tmp/tmp.ZnzQjsG8h8 ++ rm /tmp/tmp.Y6LLoVCv95 /tmp/tmp.ZnzQjsG8h8 ++ return 0 + client_pod=pxc-client-857d976497-t7kqm + wait_pod pxc-client-857d976497-t7kqm + local pod=pxc-client-857d976497-t7kqm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-t7kqm ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-857d976497-t7kqm condition met waiting for pod/pxc-client-857d976497-t7kqm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.ARylqDbRh4/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql /tmp/tmp.ARylqDbRh4/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.GAr9i1w3pB ++ mktemp + local LAST_ERR=/tmp/tmp.oYzIZ2GD1H + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.GAr9i1w3pB secret/my-cluster-secrets patched + cat /tmp/tmp.oYzIZ2GD1H + rm /tmp/tmp.GAr9i1w3pB /tmp/tmp.oYzIZ2GD1H + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KPCEMBbtEl +++ mktemp ++ local LAST_ERR=/tmp/tmp.xrLs1yFDM2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KPCEMBbtEl ++ cat /tmp/tmp.xrLs1yFDM2 ++ rm /tmp/tmp.KPCEMBbtEl /tmp/tmp.xrLs1yFDM2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qIP8lfRNcr +++ mktemp ++ local LAST_ERR=/tmp/tmp.hGYaEMheLI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qIP8lfRNcr ++ cat /tmp/tmp.hGYaEMheLI ++ rm /tmp/tmp.qIP8lfRNcr /tmp/tmp.hGYaEMheLI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1k8JTq2Tgl +++ mktemp ++ local LAST_ERR=/tmp/tmp.xUAoveQosV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1k8JTq2Tgl ++ cat /tmp/tmp.xUAoveQosV ++ rm /tmp/tmp.1k8JTq2Tgl /tmp/tmp.xUAoveQosV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
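The compare_mysql_cmd calls in this log all follow one shape: find the client pod by label, run the query through the proxy, capture the output, and diff it against an expected .sql file, preferring a 5.7-specific variant when one exists. A rough sketch under those assumptions (the exec form and local paths are illustrative, not the helper's actual code):

expected=e2e-tests/users/compare/select-4.sql
[[ -f ${expected%.sql}-57.sql ]] && expected=${expected%.sql}-57.sql  # 5.7 override
client_pod=$(kubectl get pods --selector=name=pxc-client \
  -o 'jsonpath={.items[].metadata.name}')
kubectl exec "$client_pod" -c pxc-client -- \
  mysql -h some-name-proxysql -uoperator -p'test-password' \
  -e 'SHOW TABLES;' > /tmp/select-4.sql
diff -u "$expected" /tmp/select-4.sql   # empty diff means the check passed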
.+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.btSbTpRcx4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.A7gUhHJFjW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.btSbTpRcx4 ++ cat /tmp/tmp.A7gUhHJFjW ++ rm /tmp/tmp.btSbTpRcx4 /tmp/tmp.A7gUhHJFjW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KD9y1C32Yq +++ mktemp ++ local LAST_ERR=/tmp/tmp.c1Y7ibj5Ss ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KD9y1C32Yq ++ cat /tmp/tmp.c1Y7ibj5Ss ++ rm /tmp/tmp.KD9y1C32Yq /tmp/tmp.c1Y7ibj5Ss ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HeXCK3UHsW +++ mktemp ++ local LAST_ERR=/tmp/tmp.BQbDfx02Hh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HeXCK3UHsW ++ cat /tmp/tmp.BQbDfx02Hh ++ rm /tmp/tmp.HeXCK3UHsW /tmp/tmp.BQbDfx02Hh ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.IVeenwQeR7 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.PpHfzJLCjX +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.IVeenwQeR7 +++++ cat /tmp/tmp.PpHfzJLCjX +++++ rm /tmp/tmp.IVeenwQeR7 /tmp/tmp.PpHfzJLCjX +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.qgpLdFF2Pj ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.98reAABMss +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.qgpLdFF2Pj +++++ cat /tmp/tmp.98reAABMss +++++ rm /tmp/tmp.qgpLdFF2Pj /tmp/tmp.98reAABMss +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.f82MHLZTcZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.ySivW7pya4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.f82MHLZTcZ ++ cat /tmp/tmp.ySivW7pya4 ++ rm /tmp/tmp.f82MHLZTcZ /tmp/tmp.ySivW7pya4 ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW 
TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pz6Mu7VHKQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.9YgmeD6T7l ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pz6Mu7VHKQ ++ cat /tmp/tmp.9YgmeD6T7l ++ rm /tmp/tmp.pz6Mu7VHKQ /tmp/tmp.9YgmeD6T7l ++ return 0 + client_pod=pxc-client-857d976497-t7kqm + wait_pod pxc-client-857d976497-t7kqm + local pod=pxc-client-857d976497-t7kqm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-t7kqm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-t7kqm condition met waiting for pod/pxc-client-857d976497-t7kqm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.ARylqDbRh4/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql /tmp/tmp.ARylqDbRh4/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.LmHIWjqgLe ++ mktemp + local LAST_ERR=/tmp/tmp.O5p4mpfRNm + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LmHIWjqgLe perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.O5p4mpfRNm + rm /tmp/tmp.LmHIWjqgLe /tmp/tmp.O5p4mpfRNm + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PAWC4RLarg +++ mktemp ++ local LAST_ERR=/tmp/tmp.P80XMaKBaV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ 
'[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PAWC4RLarg ++ cat /tmp/tmp.P80XMaKBaV ++ rm /tmp/tmp.PAWC4RLarg /tmp/tmp.P80XMaKBaV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Z2LoaxLKqA +++ mktemp ++ local LAST_ERR=/tmp/tmp.lKMJul9vkm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Z2LoaxLKqA ++ cat /tmp/tmp.lKMJul9vkm ++ rm /tmp/tmp.Z2LoaxLKqA /tmp/tmp.lKMJul9vkm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.j02uEYlPP8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.1sQf7p7dFZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.j02uEYlPP8 ++ cat /tmp/tmp.1sQf7p7dFZ ++ rm /tmp/tmp.j02uEYlPP8 /tmp/tmp.1sQf7p7dFZ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wl5t9utI1Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.6OLprWn7CZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wl5t9utI1Y ++ cat /tmp/tmp.6OLprWn7CZ ++ rm /tmp/tmp.wl5t9utI1Y /tmp/tmp.6OLprWn7CZ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Se8RcQpy9F +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZA4v7CzbO9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Se8RcQpy9F ++ cat /tmp/tmp.ZA4v7CzbO9 ++ rm /tmp/tmp.Se8RcQpy9F /tmp/tmp.ZA4v7CzbO9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fQFNKsnL9g +++ mktemp ++ local LAST_ERR=/tmp/tmp.g8pnSDGTyG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fQFNKsnL9g ++ cat /tmp/tmp.g8pnSDGTyG ++ rm /tmp/tmp.fQFNKsnL9g /tmp/tmp.g8pnSDGTyG ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.M6Uq5l5eJH +++ mktemp ++ local LAST_ERR=/tmp/tmp.lmM9dseTXh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.M6Uq5l5eJH ++ cat /tmp/tmp.lmM9dseTXh ++ rm /tmp/tmp.M6Uq5l5eJH /tmp/tmp.lmM9dseTXh ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
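The polling pattern repeated throughout this trace reduces to roughly the sketch below. The function and argument names are reconstructed from the trace, not copied from the test suite, and the real helper also verifies the proxy replica count after the state check; treat this as a hedged sketch only.

wait_cluster_consistency() {
    local cluster_name="$1" cluster_size="$2"
    local i=0 max=300
    sleep 7
    echo -n "waiting for pxc/${cluster_name} to be ready"
    # poll the custom resource state every 5 seconds, as the trace shows
    until [[ "$(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.state}')" == "ready" ]]; do
        echo -n .
        sleep 5
        [[ $i -ge $max ]] && return 1   # give up after max iterations
        let i+=1
    done
    # once ready, confirm the expected number of PXC members report ready
    [[ "$(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.pxc.ready}')" == "$cluster_size" ]]
}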
[wait-loop iterations 6-17 elided: each run of kubectl get pxc some-name -o 'jsonpath={.status.state}' returned "initializing", printed a dot, and slept 5 seconds; the trace differs only in mktemp file names]
.+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SaepZrE8X2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.BX3QABrUOQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SaepZrE8X2 ++ cat /tmp/tmp.BX3QABrUOQ ++ rm /tmp/tmp.SaepZrE8X2 /tmp/tmp.BX3QABrUOQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xVnQOiALsy +++ mktemp ++ local LAST_ERR=/tmp/tmp.OOgvYvZBDB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xVnQOiALsy ++ cat /tmp/tmp.OOgvYvZBDB ++ rm /tmp/tmp.xVnQOiALsy /tmp/tmp.OOgvYvZBDB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6LJ5TDBQxv +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZRkR8bhZYU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6LJ5TDBQxv ++ cat /tmp/tmp.ZRkR8bhZYU ++ rm /tmp/tmp.6LJ5TDBQxv /tmp/tmp.ZRkR8bhZYU ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2ADtIWD7No +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZmJQJ72UsE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2ADtIWD7No ++ cat /tmp/tmp.ZmJQJ72UsE ++ rm /tmp/tmp.2ADtIWD7No /tmp/tmp.ZmJQJ72UsE ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.5R7HDwSi0D ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.aRSkAoC5bY +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.5R7HDwSi0D +++++ cat /tmp/tmp.aRSkAoC5bY +++++ rm /tmp/tmp.5R7HDwSi0D /tmp/tmp.aRSkAoC5bY +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.jEtkm4eHQb ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Qi3GL1OSdN +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.jEtkm4eHQb +++++ cat /tmp/tmp.Qi3GL1OSdN +++++ rm /tmp/tmp.jEtkm4eHQb /tmp/tmp.Qi3GL1OSdN +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local 
LAST_OUT=/tmp/tmp.lyA01I45oR +++ mktemp ++ local LAST_ERR=/tmp/tmp.sD4ZBHUV2N ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lyA01I45oR ++ cat /tmp/tmp.sD4ZBHUV2N ++ rm /tmp/tmp.lyA01I45oR /tmp/tmp.sD4ZBHUV2N ++ return 0 + [[ 2 == \2 ]] + echo + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.lMUz4lv8j3 ++ mktemp + local LAST_ERR=/tmp/tmp.o1DtWKFCDt + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lMUz4lv8j3 secret/my-cluster-secrets-2 patched + cat /tmp/tmp.o1DtWKFCDt + rm /tmp/tmp.lMUz4lv8j3 /tmp/tmp.o1DtWKFCDt + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zWmDKMQngl +++ mktemp ++ local LAST_ERR=/tmp/tmp.5R3HS9DKqU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zWmDKMQngl ++ cat /tmp/tmp.5R3HS9DKqU ++ rm /tmp/tmp.zWmDKMQngl /tmp/tmp.5R3HS9DKqU ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1ui1UTQ8OB +++ mktemp ++ local LAST_ERR=/tmp/tmp.Vq4xZneQkH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1ui1UTQ8OB ++ cat /tmp/tmp.Vq4xZneQkH ++ rm /tmp/tmp.1ui1UTQ8OB /tmp/tmp.Vq4xZneQkH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
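The patch_secret call traced above amounts to a one-line kubectl patch. A minimal reconstruction from the trace (the value argument is expected to be base64-encoded already, as the preceding echo -n | base64 step shows):

patch_secret() {
    local secret="$1" key="$2" value="$3"   # value must already be base64-encoded
    kubectl patch secret "$secret" -p="{\"data\":{\"$key\": \"$value\"}}"
}
# usage, mirroring the trace:
# patch_secret my-cluster-secrets-2 operator "$(echo -n 'test-password2' | base64)"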
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Elnuomv4oX +++ mktemp ++ local LAST_ERR=/tmp/tmp.bQruqzNrCJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Elnuomv4oX ++ cat /tmp/tmp.bQruqzNrCJ ++ rm /tmp/tmp.Elnuomv4oX /tmp/tmp.bQruqzNrCJ ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ABx1pbpX8F +++ mktemp ++ local LAST_ERR=/tmp/tmp.OElUQqPdpM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ABx1pbpX8F ++ cat /tmp/tmp.OElUQqPdpM ++ rm /tmp/tmp.ABx1pbpX8F /tmp/tmp.OElUQqPdpM ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ZLXqftzVKM ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.SRfOyAp8Fl +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ZLXqftzVKM +++++ cat /tmp/tmp.SRfOyAp8Fl +++++ rm /tmp/tmp.ZLXqftzVKM /tmp/tmp.SRfOyAp8Fl +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.7YW2yI3Ll9 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.KKExC12n81 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.7YW2yI3Ll9 +++++ cat /tmp/tmp.KKExC12n81 +++++ rm /tmp/tmp.7YW2yI3Ll9 /tmp/tmp.KKExC12n81 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5gsIktvsFT +++ mktemp ++ local LAST_ERR=/tmp/tmp.hwgOlomiPc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5gsIktvsFT ++ cat /tmp/tmp.hwgOlomiPc ++ rm /tmp/tmp.5gsIktvsFT /tmp/tmp.hwgOlomiPc ++ return 0 + [[ 2 == \2 ]] + echo + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW 
TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fhKn4vrzxe +++ mktemp ++ local LAST_ERR=/tmp/tmp.JOjN20SjmK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fhKn4vrzxe ++ cat /tmp/tmp.JOjN20SjmK ++ rm /tmp/tmp.fhKn4vrzxe /tmp/tmp.JOjN20SjmK ++ return 0 + client_pod=pxc-client-857d976497-t7kqm + wait_pod pxc-client-857d976497-t7kqm + local pod=pxc-client-857d976497-t7kqm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-t7kqm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-t7kqm condition met waiting for pod/pxc-client-857d976497-t7kqm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.ARylqDbRh4/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql /tmp/tmp.ARylqDbRh4/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.nKsZBzUtRT +++ mktemp ++ local LAST_ERR=/tmp/tmp.Cjfmf15UHT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nKsZBzUtRT ++ cat /tmp/tmp.Cjfmf15UHT ++ rm /tmp/tmp.nKsZBzUtRT /tmp/tmp.Cjfmf15UHT ++ return 0 + newpass='&ew.Ev1G[j_s4996' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''&ew.Ev1G[j_s4996'\'';' '-h some-name-pxc -uroot -p'\''&ew.Ev1G[j_s4996'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''&ew.Ev1G[j_s4996'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''&ew.Ev1G[j_s4996'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qLc8MO7NHe +++ mktemp ++ local LAST_ERR=/tmp/tmp.Q2g879AjWr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qLc8MO7NHe ++ cat /tmp/tmp.Q2g879AjWr ++ rm /tmp/tmp.qLc8MO7NHe /tmp/tmp.Q2g879AjWr ++ return 0 + client_pod=pxc-client-857d976497-t7kqm + wait_pod pxc-client-857d976497-t7kqm + local pod=pxc-client-857d976497-t7kqm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-t7kqm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-t7kqm condition met waiting for pod/pxc-client-857d976497-t7kqm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup 
.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''&ew.Ev1G[j_s4996'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''&ew.Ev1G[j_s4996'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''&ew.Ev1G[j_s4996'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''&ew.Ev1G[j_s4996'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HJDusNgY0l +++ mktemp ++ local LAST_ERR=/tmp/tmp.02JTPWG7OF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HJDusNgY0l ++ cat /tmp/tmp.02JTPWG7OF ++ rm /tmp/tmp.HJDusNgY0l /tmp/tmp.02JTPWG7OF ++ return 0 + client_pod=pxc-client-857d976497-t7kqm + wait_pod pxc-client-857d976497-t7kqm + local pod=pxc-client-857d976497-t7kqm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-t7kqm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-t7kqm condition met waiting for pod/pxc-client-857d976497-t7kqm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.ARylqDbRh4/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql /tmp/tmp.ARylqDbRh4/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.sWGJT83TtK +++ mktemp ++ local LAST_ERR=/tmp/tmp.FeRPXWNOqN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sWGJT83TtK ++ cat /tmp/tmp.FeRPXWNOqN ++ rm /tmp/tmp.sWGJT83TtK /tmp/tmp.FeRPXWNOqN ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.8U1mmbe8MS ++ mktemp + local LAST_ERR=/tmp/tmp.T0OgTaU9wX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8U1mmbe8MS secret/my-cluster-secrets-2 configured + cat /tmp/tmp.T0OgTaU9wX Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
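The getSecretData helper exercised above is effectively a Go-template lookup piped through base64; a minimal sketch reconstructed from the trace:

getSecretData() {
    local secretName="$1" dataKey="$2"
    # read one key from the secret's data map and decode it to plaintext
    kubectl get "secrets/$secretName" "--template={{.data.$dataKey}}" | base64 --decode
}
# e.g. pass=$(getSecretData internal-some-name operator)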
+ rm /tmp/tmp.8U1mmbe8MS /tmp/tmp.T0OgTaU9wX + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yuPU7WGu0I +++ mktemp ++ local LAST_ERR=/tmp/tmp.LTWIvldxZN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yuPU7WGu0I ++ cat /tmp/tmp.LTWIvldxZN ++ rm /tmp/tmp.yuPU7WGu0I /tmp/tmp.LTWIvldxZN ++ return 0 + client_pod=pxc-client-857d976497-t7kqm + wait_pod pxc-client-857d976497-t7kqm + local pod=pxc-client-857d976497-t7kqm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-t7kqm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-t7kqm condition met waiting for pod/pxc-client-857d976497-t7kqm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.ARylqDbRh4/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql /tmp/tmp.ARylqDbRh4/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/conf/some-name.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_OUT=/tmp/tmp.IjBgs07aVX + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-1597~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2154-7a623b10#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + local LAST_ERR=/tmp/tmp.UnBNdUlVl4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.IjBgs07aVX perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.UnBNdUlVl4 + rm /tmp/tmp.IjBgs07aVX /tmp/tmp.UnBNdUlVl4 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JcePFK1rx4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.bx8rVbXASy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JcePFK1rx4 ++ cat /tmp/tmp.bx8rVbXASy ++ rm /tmp/tmp.JcePFK1rx4 /tmp/tmp.bx8rVbXASy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
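The apply_config step above pipes the CR manifest through a series of sed substitutions before kubectl apply -f -. The sketch below keeps only a representative subset of the substitutions visible in the trace (the full pipeline also rewrites the proxysql, haproxy, pmm, backup, logcollector, and init images), so it is an approximation rather than the suite's exact helper:

apply_config() {
    cat "$1" \
        | /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
        | /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' \
        | /usr/bin/sed -e 's#apply:.*#apply: Never#' \
        | kubectl apply -f -
}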
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8hogJqN2Ri +++ mktemp ++ local LAST_ERR=/tmp/tmp.zE9ZydgAW7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8hogJqN2Ri ++ cat /tmp/tmp.zE9ZydgAW7 ++ rm /tmp/tmp.8hogJqN2Ri /tmp/tmp.zE9ZydgAW7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZHmU7JfjCx +++ mktemp ++ local LAST_ERR=/tmp/tmp.qHCFRwNmDy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZHmU7JfjCx ++ cat /tmp/tmp.qHCFRwNmDy ++ rm /tmp/tmp.ZHmU7JfjCx /tmp/tmp.qHCFRwNmDy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4K7GMyIOgK +++ mktemp ++ local LAST_ERR=/tmp/tmp.C8E5sx3KEw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4K7GMyIOgK ++ cat /tmp/tmp.C8E5sx3KEw ++ rm /tmp/tmp.4K7GMyIOgK /tmp/tmp.C8E5sx3KEw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Mzu3AcwrWH +++ mktemp ++ local LAST_ERR=/tmp/tmp.MLpWu8ufjK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Mzu3AcwrWH ++ cat /tmp/tmp.MLpWu8ufjK ++ rm /tmp/tmp.Mzu3AcwrWH /tmp/tmp.MLpWu8ufjK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ua4AwfgEax +++ mktemp ++ local LAST_ERR=/tmp/tmp.KehxEr3GKm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ua4AwfgEax ++ cat /tmp/tmp.KehxEr3GKm ++ rm /tmp/tmp.ua4AwfgEax /tmp/tmp.KehxEr3GKm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.knb7THoNg9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.zoHpxked68 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.knb7THoNg9 ++ cat /tmp/tmp.zoHpxked68 ++ rm /tmp/tmp.knb7THoNg9 /tmp/tmp.zoHpxked68 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
[wait-loop iterations 6-17 elided: each run of kubectl get pxc some-name -o 'jsonpath={.status.state}' returned "initializing", printed a dot, and slept 5 seconds; the trace differs only in mktemp file names]
.+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nDyOI2Kxdm +++ mktemp ++ local LAST_ERR=/tmp/tmp.MFkQFVmpPy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nDyOI2Kxdm ++ cat /tmp/tmp.MFkQFVmpPy ++ rm /tmp/tmp.nDyOI2Kxdm /tmp/tmp.MFkQFVmpPy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8iBSsNx3lQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.bqP0Jq68FS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8iBSsNx3lQ ++ cat /tmp/tmp.bqP0Jq68FS ++ rm /tmp/tmp.8iBSsNx3lQ /tmp/tmp.bqP0Jq68FS ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zrSqwvMxet +++ mktemp ++ local LAST_ERR=/tmp/tmp.DcoMrFmbeY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zrSqwvMxet ++ cat /tmp/tmp.DcoMrFmbeY ++ rm /tmp/tmp.zrSqwvMxet /tmp/tmp.DcoMrFmbeY ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.kz66C3BOhp ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.AFtgq3DCqR +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.kz66C3BOhp +++++ cat /tmp/tmp.AFtgq3DCqR +++++ rm /tmp/tmp.kz66C3BOhp /tmp/tmp.AFtgq3DCqR +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Db4uHGJ3v2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.n6HCz83Pzt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Db4uHGJ3v2 ++ cat /tmp/tmp.n6HCz83Pzt ++ rm /tmp/tmp.Db4uHGJ3v2 /tmp/tmp.n6HCz83Pzt ++ return 0 + [[ 3 == \3 ]] + echo + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + check_generation 2 haproxy some-name + local generation=2 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VdT7gvWNmc +++ mktemp ++ local LAST_ERR=/tmp/tmp.hM6XATC2RX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VdT7gvWNmc ++ cat /tmp/tmp.hM6XATC2RX ++ rm /tmp/tmp.VdT7gvWNmc /tmp/tmp.hM6XATC2RX ++ return 0 + 
current_generation=2 + [[ 2 != \2 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.d46VeFEXiU ++ mktemp + local LAST_ERR=/tmp/tmp.G1fIWMNX2q + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.d46VeFEXiU secret/my-cluster-secrets patched + cat /tmp/tmp.G1fIWMNX2q + rm /tmp/tmp.d46VeFEXiU /tmp/tmp.G1fIWMNX2q + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dFDYdIDIab +++ mktemp ++ local LAST_ERR=/tmp/tmp.IXJcOMlm5K ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dFDYdIDIab ++ cat /tmp/tmp.IXJcOMlm5K ++ rm /tmp/tmp.dFDYdIDIab /tmp/tmp.IXJcOMlm5K ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cvdb8mXRBO +++ mktemp ++ local LAST_ERR=/tmp/tmp.L7FIQMy19T ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cvdb8mXRBO ++ cat /tmp/tmp.L7FIQMy19T ++ rm /tmp/tmp.cvdb8mXRBO /tmp/tmp.L7FIQMy19T ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FuMhghs4nm +++ mktemp ++ local LAST_ERR=/tmp/tmp.yBYmTJSkUE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FuMhghs4nm ++ cat /tmp/tmp.yBYmTJSkUE ++ rm /tmp/tmp.FuMhghs4nm /tmp/tmp.yBYmTJSkUE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2d4TnYtQWB +++ mktemp ++ local LAST_ERR=/tmp/tmp.4VQK3THxY6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2d4TnYtQWB ++ cat /tmp/tmp.4VQK3THxY6 ++ rm /tmp/tmp.2d4TnYtQWB /tmp/tmp.4VQK3THxY6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
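The check_generation call traced at the start of this block compares a statefulset's metadata.generation against the expected value after a spec change. A minimal sketch reconstructed from the trace; the real helper's failure handling may differ:

check_generation() {
    local generation="$1" container="$2" cluster="$3"
    local current_generation
    current_generation=$(kubectl get statefulset "${cluster}-${container}" -o 'jsonpath={.metadata.generation}')
    # a mismatch means the statefulset was regenerated more (or fewer) times than expected
    if [[ "$current_generation" != "$generation" ]]; then
        echo "generation mismatch: expected $generation, got $current_generation"
        return 1
    fi
}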
[wait-loop iterations 3-6 elided: each run of kubectl get pxc some-name -o 'jsonpath={.status.state}' returned "initializing", printed a dot, and slept 5 seconds; the trace differs only in mktemp file names]
.+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XoF4R98Iby +++ mktemp ++ local LAST_ERR=/tmp/tmp.9BKgDRd98K ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XoF4R98Iby ++ cat /tmp/tmp.9BKgDRd98K ++ rm /tmp/tmp.XoF4R98Iby /tmp/tmp.9BKgDRd98K ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sbcGoJuC2Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.M6bCwhCKbr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sbcGoJuC2Y ++ cat /tmp/tmp.M6bCwhCKbr ++ rm /tmp/tmp.sbcGoJuC2Y /tmp/tmp.M6bCwhCKbr ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.gwkLpF0pFG ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.TKBRoOhnd3 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.gwkLpF0pFG +++++ cat /tmp/tmp.TKBRoOhnd3 +++++ rm /tmp/tmp.gwkLpF0pFG /tmp/tmp.TKBRoOhnd3 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BMTgmmgA0O +++ mktemp ++ local LAST_ERR=/tmp/tmp.6ULhux8MHg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BMTgmmgA0O ++ cat /tmp/tmp.6ULhux8MHg ++ rm /tmp/tmp.BMTgmmgA0O /tmp/tmp.6ULhux8MHg ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-3-57.sql ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tQk9eK90uL +++ mktemp ++ local LAST_ERR=/tmp/tmp.6sPdIiZxvl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 
'!=' 0 ']' ++ break ++ cat /tmp/tmp.tQk9eK90uL ++ cat /tmp/tmp.6sPdIiZxvl ++ rm /tmp/tmp.tQk9eK90uL /tmp/tmp.6sPdIiZxvl ++ return 0 + client_pod=pxc-client-857d976497-t7kqm + wait_pod pxc-client-857d976497-t7kqm + local pod=pxc-client-857d976497-t7kqm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-t7kqm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-t7kqm condition met waiting for pod/pxc-client-857d976497-t7kqm to become Ready Defaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.ARylqDbRh4/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-3.sql /tmp/tmp.ARylqDbRh4/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + check_generation 3 haproxy some-name + local generation=3 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1qsZeGRCfF +++ mktemp ++ local LAST_ERR=/tmp/tmp.Gwaf6hNI2D ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1qsZeGRCfF ++ cat /tmp/tmp.Gwaf6hNI2D ++ rm /tmp/tmp.1qsZeGRCfF /tmp/tmp.Gwaf6hNI2D ++ return 0 + current_generation=3 + [[ 3 != \3 ]] + destroy users-1597 + local namespace=users-1597 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' ++ get_operator_pod + grep -v 'the object has been modified' ++ local label_prefix=app.kubernetes.io/ + grep -v 'get backup status: Job.batch' + sort -u + tee /tmp/tmp.ARylqDbRh4/operator.log + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator + grep -v level=info ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.7DI8JbRC0p +++ mktemp ++ local LAST_ERR=/tmp/tmp.jvmEsoNJUA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7DI8JbRC0p ++ cat /tmp/tmp.jvmEsoNJUA ++ rm /tmp/tmp.7DI8JbRC0p /tmp/tmp.jvmEsoNJUA ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-6cf85965f9-dc5tx ++ mktemp + local LAST_OUT=/tmp/tmp.PIMFByrqwZ ++ mktemp + local LAST_ERR=/tmp/tmp.R0Kd21QHO2 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-6cf85965f9-dc5tx + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PIMFByrqwZ + cat /tmp/tmp.R0Kd21QHO2 + rm /tmp/tmp.PIMFByrqwZ /tmp/tmp.R0Kd21QHO2 + return 0
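Every kubectl invocation in this trace goes through the same wrapper: stdout and stderr are captured to mktemp files, the attempt is retried up to three times (the `seq 0 2` loop) with `set +e` around the command, and both streams are printed before the tempfiles are removed. A minimal bash sketch of that pattern; the name kubectl_retry and the 1-second backoff are illustrative, not the harness's actual kubectl_bin source:

    kubectl_retry() {
        # Capture stdout/stderr to tempfiles, mirroring the LAST_OUT/LAST_ERR
        # pairs seen throughout this trace, and retry up to 3 times.
        local out err rc=1 i
        out=$(mktemp)
        err=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$out" 2>"$err"
            rc=$?
            set -e
            [ "$rc" -eq 0 ] && break
            sleep 1
        done
        # Always print what the command produced, then clean up.
        cat "$out"
        cat "$err" >&2
        rm -f "$out" "$err"
        return "$rc"
    }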
[filtered object-diff fragments from the operator log, reduced to braces by the grep/sed pipeline above; recoverable content: a hash field changed from "03d7d8938513267f1e2c65bb99e743300097e787ca75736b671cb9ef2c75be1a" to "162919c08d7d1c7642a69fe3171501c8f0db8e2984e705edffec30f2086bdbe1", plus "... // 16 identical fields"] 2025-11-19T18:45:27.164Z INFO setup Manager starting up {"gitCommit": "7a623b10a97567887377e516f24d3500d7412fc7", "gitBranch": "PR-2154-7a623b10", "buildTime": "2025-11-19T16:30:53Z", "goVersion": "go1.25.4", "os": "linux", "arch": "amd64"} 2025-11-19T18:45:27.164Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.13-gke.1377000"} 2025-11-19T18:45:27.167Z INFO setup Registering Components. 2025-11-19T18:45:27.646Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-11-19T18:45:27.646Z INFO setup Starting the Cmd. 2025-11-19T18:45:27.647Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-11-19T18:45:27.647Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-11-19T18:45:27.647Z INFO controller-runtime.metrics Starting metrics server 2025-11-19T18:45:27.647Z INFO controller-runtime.webhook Starting webhook server 2025-11-19T18:45:27.647Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-11-19T18:45:27.648Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-11-19T18:45:27.648Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-11-19T18:45:27.749Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com...
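The startup sequence above ends with the manager waiting on the lease pxc-operator/08db1feb.percona.com; the controllers and workers only start once it is acquired, as the next entries show. If reconciliation ever looks stalled, one way to see which operator pod currently holds that lease is to read the Lease object directly; a sketch using the lease name and namespace from the log above:

    # Print the identity of the current leader-election holder.
    kubectl get lease 08db1feb.percona.com -n pxc-operator \
        -o jsonpath='{.spec.holderIdentity}{"\n"}'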
2025-11-19T18:45:27.785Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2025-11-19T18:45:27.786Z DEBUG events percona-xtradb-cluster-operator-6cf85965f9-dc5tx_77943405-2816-4c10-b931-9fd5aed8ea56 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"0301e0df-23bf-4251-acc1-e3fb40c7e802","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1763577927778095009"}, "reason": "LeaderElection"} 2025-11-19T18:45:27.786Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-11-19T18:45:27.786Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-11-19T18:45:27.786Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.Secret"} 2025-11-19T18:45:27.786Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-11-19T18:45:27.886Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2025-11-19T18:45:27.886Z INFO Starting Controller {"controller": "pxc-controller"} 2025-11-19T18:45:27.886Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2025-11-19T18:45:27.886Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2025-11-19T18:45:27.886Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2025-11-19T18:45:27.886Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2025-11-19T18:46:06.760Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "7cfcd73c-0137-4cf3-a8e8-e7030fab5af2", "version": "1.19.0"} 2025-11-19T18:46:06.994Z INFO User secrets updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "7cfcd73c-0137-4cf3-a8e8-e7030fab5af2", "secrets": "my-cluster-secrets"} 2025-11-19T18:46:07.211Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "7cfcd73c-0137-4cf3-a8e8-e7030fab5af2", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-11-19T18:46:07.229Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "7cfcd73c-0137-4cf3-a8e8-e7030fab5af2", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-11-19T18:46:07.801Z ERROR Reconciler error {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "7cfcd73c-0137-4cf3-a8e8-e7030fab5af2", "error": "failed to deploy proxysql: updatePod for proxysql: reconcile config: reconcile autotune config: create or update configmap: configmaps \"auto-some-name-pxc\" already exists", "errorVerbose": "configmaps \"auto-some-name-pxc\" already exists\ncreate or update 
configmap\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileAutotuneConfigMap\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:92\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:25\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nreconcile autotune 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:27\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nreconcile 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:54\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nupdatePod for proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:578\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nfailed to deploy 
proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:596\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-19T18:46:07.903Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "8d97084d-efd0-4f54-a228-88fdb65a57a8", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-19T18:46:07.939Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "8d97084d-efd0-4f54-a228-88fdb65a57a8", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-19T18:46:07.988Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "8d97084d-efd0-4f54-a228-88fdb65a57a8", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-19T18:46:08.039Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "8d97084d-efd0-4f54-a228-88fdb65a57a8", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-19T18:46:08.104Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "8d97084d-efd0-4f54-a228-88fdb65a57a8", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-19T18:46:08.303Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "8d97084d-efd0-4f54-a228-88fdb65a57a8", "object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-19T18:46:09.508Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "2a08db64-5751-44f0-b29d-2a4714947794", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-11-19T18:46:09.531Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "2a08db64-5751-44f0-b29d-2a4714947794", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-11-19T18:47:20.792Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-1597", "name": 
"some-name", "reconcileID": "cf1b437a-c49c-4830-8dab-7696a2fd81e5", "user": "operator"} 2025-11-19T18:47:20.821Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "cf1b437a-c49c-4830-8dab-7696a2fd81e5", "user": "monitor"} 2025-11-19T18:47:20.866Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "cf1b437a-c49c-4830-8dab-7696a2fd81e5"} 2025-11-19T18:47:20.899Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "cf1b437a-c49c-4830-8dab-7696a2fd81e5", "user": "xtrabackup"} 2025-11-19T18:47:20.928Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "cf1b437a-c49c-4830-8dab-7696a2fd81e5"} 2025-11-19T18:47:20.937Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "cf1b437a-c49c-4830-8dab-7696a2fd81e5", "err": "get primary pxc pod: not found"} 2025-11-19T18:47:25.778Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "c3fc331a-5c84-48ae-bd91-2edb996927ed", "err": "get primary pxc pod: not found"} 2025-11-19T18:47:31.001Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "0c2e8e4e-3e35-4304-95d4-74d9588ffb02", "err": "get primary pxc pod: not found"} 2025-11-19T18:47:36.134Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fc5a4721-1b1c-4e30-9cfc-4adf1c5fe081", "err": "get primary pxc pod: not found"} 2025-11-19T18:49:41.775Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "efe5eec9-48da-4ca3-b598-90272e11a161", "user": "root"} 2025-11-19T18:49:41.826Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "efe5eec9-48da-4ca3-b598-90272e11a161", "user": "replication"} 2025-11-19T18:49:41.882Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "efe5eec9-48da-4ca3-b598-90272e11a161", "new version": "5.7.44-48-57"} 2025-11-19T18:49:43.725Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "efe5eec9-48da-4ca3-b598-90272e11a161"} 2025-11-19T18:49:49.045Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "831ee46e-1aea-4fe8-9a5a-f860af0af1c4"} 2025-11-19T18:49:54.358Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "488a0724-bd10-44e1-8cdd-07d090a9c27f"} 2025-11-19T18:49:59.634Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "3395d1bc-a64b-498d-b098-316dc7a65714"} 2025-11-19T18:50:04.769Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "e81eb360-0276-4fe8-afea-94175c3244e0"} 
2025-11-19T18:50:09.934Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bb2a7375-3a6c-416b-991a-071d97f91b4e"} 2025-11-19T18:50:15.444Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "a886da21-d305-4cd6-b475-86d757469604"} 2025-11-19T18:50:20.653Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "d20115bd-1627-4daf-8467-94b3169ebb33"} 2025-11-19T18:50:25.673Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "4eefb0b3-d73d-4532-be1d-11d8bce262e6"} 2025-11-19T18:50:31.173Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "2d9bc26c-3670-45e5-b2d4-3abb73ea5ff2"} 2025-11-19T18:50:36.534Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "13865a29-9e94-44d4-9292-9e430f3b586b"} 2025-11-19T18:50:41.453Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "3a346445-aaf5-4b75-bcdf-229afa046cdb"} 2025-11-19T18:50:47.036Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "cf1fdc0f-ea37-46b7-89e0-04be3feccb1f"} 2025-11-19T18:50:52.228Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "46854f6f-101d-411e-8059-daff95e28a46"} 2025-11-19T18:50:57.243Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "a4179187-2efb-49d0-9351-7b6260413bd6"} 2025-11-19T18:51:03.132Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "93f50fc0-116b-4a27-98d5-93af59b8eda8"} 2025-11-19T18:51:07.949Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "a17c6ef9-11af-4525-a505-e4fe3928219d"} 2025-11-19T18:51:12.968Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "45803e7c-10ea-4f4d-a720-cb19130c5205"} 2025-11-19T18:51:18.996Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "b52659d0-9be2-4eb0-88b3-b3ea618cffd9", "user": "root"} 2025-11-19T18:51:19.011Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "b52659d0-9be2-4eb0-88b3-b3ea618cffd9", "user": "root"} 2025-11-19T18:51:19.028Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "b52659d0-9be2-4eb0-88b3-b3ea618cffd9", "secret": "some-name-mysql-init", "user": "root"} 2025-11-19T18:51:19.147Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "0de7ade3-2a39-4cf3-8ca9-04623bd6d783"} 2025-11-19T18:51:21.557Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", 
"name": "some-name", "reconcileID": "b52659d0-9be2-4eb0-88b3-b3ea618cffd9"} 2025-11-19T18:51:21.590Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "b52659d0-9be2-4eb0-88b3-b3ea618cffd9", "user": "root"} 2025-11-19T18:51:23.068Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "b52659d0-9be2-4eb0-88b3-b3ea618cffd9"} 2025-11-19T18:51:29.776Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "071cd3f9-8cba-4c8d-b1d0-292a9b51d421"} 2025-11-19T18:51:34.933Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "53c27a06-470c-4e24-8d70-e942bf05fa14"} 2025-11-19T18:51:38.119Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "7fc42fff-3ea2-4642-a197-b0fcd184e1c3", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T18:51:38.175Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "7fc42fff-3ea2-4642-a197-b0fcd184e1c3", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T18:51:41.225Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "7fc42fff-3ea2-4642-a197-b0fcd184e1c3", "error": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-19T18:52:04.895Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "eb533154-c363-4e2b-b67a-ad7047c3b5ac", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with 
writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-19T18:52:05.181Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "188c8b5a-3a60-4485-ad04-82f258837b30", "err": "get primary pxc pod: not found"} 2025-11-19T18:52:05.593Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "0f93a9ec-a24e-43f0-a7f2-fe5a759698f3", "user": "proxyadmin"} 2025-11-19T18:52:05.593Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "0f93a9ec-a24e-43f0-a7f2-fe5a759698f3", "user": "proxyadmin"} 2025-11-19T18:52:05.622Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "0f93a9ec-a24e-43f0-a7f2-fe5a759698f3", "user": "proxyadmin"} 2025-11-19T18:52:05.642Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "0f93a9ec-a24e-43f0-a7f2-fe5a759698f3", "user": "proxyadmin"} 2025-11-19T18:52:05.642Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "0f93a9ec-a24e-43f0-a7f2-fe5a759698f3", "last-applied-secret": "03d7d8938513267f1e2c65bb99e743300097e787ca75736b671cb9ef2c75be1a"} 2025-11-19T18:52:05.645Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "0f93a9ec-a24e-43f0-a7f2-fe5a759698f3", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T18:52:06.991Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "188c8b5a-3a60-4485-ad04-82f258837b30", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:509) : Galera hostgroup retrieval failed. \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:509) : Galera hostgroup retrieval failed. \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-19T18:52:20.903Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "24954837-c7c3-449b-baa4-cf22db9b6066", "err": "get primary pxc pod: not found"} 2025-11-19T18:52:26.215Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "f4df3c01-14f1-4cc6-b8f2-d5275990e6d3", "err": "get primary pxc pod: not found"} 2025-11-19T18:52:31.592Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "cc9c4021-c9eb-4d8a-9009-8ffd9a6c3658", "err": "get primary pxc pod: not found"} 2025-11-19T18:52:53.133Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "28e2b87d-56cb-443c-a5c5-d452f092676a"} 2025-11-19T18:52:57.446Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "44d11751-72b8-4958-b6a9-0966cb42abef", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T18:52:57.493Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "44d11751-72b8-4958-b6a9-0966cb42abef", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T18:52:58.632Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "91ddcec6-6668-4439-a2de-27736aa4a1eb", "error": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-2\" not found / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-2\" not found / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-19T18:52:58.869Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "9f6e0ac3-f2c8-47d7-9483-14247b252862", "user": "xtrabackup"} 2025-11-19T18:52:58.882Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "9f6e0ac3-f2c8-47d7-9483-14247b252862", "user": "xtrabackup"} 2025-11-19T18:52:58.908Z 
INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "9f6e0ac3-f2c8-47d7-9483-14247b252862", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-11-19T18:52:58.925Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "9f6e0ac3-f2c8-47d7-9483-14247b252862", "user": "xtrabackup"} 2025-11-19T18:52:58.926Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "9f6e0ac3-f2c8-47d7-9483-14247b252862", "last-applied-secret": "7b7e2460e56b11b8bb02ff2825972bd41d53c3b4d7c9bb4752c525e391ad6608"} 2025-11-19T18:52:58.929Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "9f6e0ac3-f2c8-47d7-9483-14247b252862", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T18:53:01.438Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "9f6e0ac3-f2c8-47d7-9483-14247b252862"} 2025-11-19T18:54:49.801Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "2f6fe8e3-daec-485c-823b-f051215d3348", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-1597 on 34.118.224.10:53: no such host"} 2025-11-19T18:54:54.955Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "1869d0df-12b8-4087-a9d0-943c0f0c6ccf", "primary name": "some-name-pxc-0.some-name-pxc.users-1597.svc.cluster.local"} 2025-11-19T18:55:00.104Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "0e198f89-fcd9-4168-91b1-fce165ad8958", "primary name": "some-name-pxc-0.some-name-pxc.users-1597.svc.cluster.local"} 2025-11-19T18:55:05.257Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "44075c9e-458f-434e-8b4f-10e52cb9971c", "primary name": "some-name-pxc-0.some-name-pxc.users-1597.svc.cluster.local"} 2025-11-19T18:55:10.466Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "b88222d7-4fba-42b2-a52f-0606b5878d57", "primary name": "some-name-pxc-0.some-name-pxc.users-1597.svc.cluster.local"} 2025-11-19T18:55:15.612Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "e8be6c69-51c9-4111-b740-1fd658a09519", "primary name": "some-name-pxc-0.some-name-pxc.users-1597.svc.cluster.local"} 2025-11-19T18:55:20.756Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "63bba0d6-4310-4b48-b572-9856e27b70cf", "primary name": "some-name-pxc-0.some-name-pxc.users-1597.svc.cluster.local"} 2025-11-19T18:55:25.895Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "9bad83c8-a784-4699-b2de-14e692c14536", "primary name": "some-name-pxc-0.some-name-pxc.users-1597.svc.cluster.local"} 2025-11-19T18:55:31.083Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "8b7fc7fe-ae3b-45bd-b9c9-23598d141ab8", "primary name": "some-name-pxc-0.some-name-pxc.users-1597.svc.cluster.local"} 2025-11-19T18:55:38.859Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "8f5342e2-0808-4820-a92a-30592de36b37"} 2025-11-19T18:55:41.820Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "5e09a241-67a8-44cc-9c2e-702809081052", "user": "monitor"} 2025-11-19T18:55:41.832Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "5e09a241-67a8-44cc-9c2e-702809081052", "user": "monitor"} 2025-11-19T18:55:41.862Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "5e09a241-67a8-44cc-9c2e-702809081052", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-19T18:55:41.881Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "5e09a241-67a8-44cc-9c2e-702809081052", "user": "monitor"} 2025-11-19T18:55:41.901Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "5e09a241-67a8-44cc-9c2e-702809081052", "user": "monitor"} 2025-11-19T18:55:41.901Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "5e09a241-67a8-44cc-9c2e-702809081052", "last-applied-secret": "a92d9b286dcedda4232265349bcba7b615814e21f7b8a378645e5b42a9b22c11"} 2025-11-19T18:55:41.905Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "5e09a241-67a8-44cc-9c2e-702809081052", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T18:55:44.222Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "5e09a241-67a8-44cc-9c2e-702809081052", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / 
\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-19T18:56:45.843Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "58afa671-c11c-4b96-8f38-e7202c631bcb"} 2025-11-19T18:56:50.815Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "c9d09319-7656-41d6-ba8d-cfd313b75b7f"} 2025-11-19T18:56:56.217Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fe6c94e1-d47e-4aea-a3dd-dd80d1c124fc"} 2025-11-19T18:57:01.233Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "83de6f52-7b86-459f-85c9-73070a0b4557"} 2025-11-19T18:57:06.253Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "f48a8141-c4ec-4b19-aa44-fa4952d122a2", "user": "operator"} 2025-11-19T18:57:06.265Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "f48a8141-c4ec-4b19-aa44-fa4952d122a2", "user": "operator"} 2025-11-19T18:57:06.282Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "f48a8141-c4ec-4b19-aa44-fa4952d122a2", "secret": "some-name-mysql-init", "user": "operator"} 2025-11-19T18:57:06.300Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "f48a8141-c4ec-4b19-aa44-fa4952d122a2", "user": "operator"} 2025-11-19T18:57:06.300Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "f48a8141-c4ec-4b19-aa44-fa4952d122a2", "last-applied-secret": "ec6dc09d2d0b3a9ce7a92fbaf49537c8f6c3d9f6dc8ddba73e95751e05a6e19f"} 2025-11-19T18:57:06.304Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "f48a8141-c4ec-4b19-aa44-fa4952d122a2", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T18:57:07.455Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "3bbdafa7-5eac-4bdb-8301-c7c842f3c529", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR 
(line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-19T18:57:50.997Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "c6219b3b-7183-4027-a4d3-41e96e5e6675"} 2025-11-19T18:57:55.309Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "ee1f8f5b-2cb2-45c4-936e-66f19f00061b"} 2025-11-19T18:58:00.493Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "a29a410c-f85b-43cf-80b0-1e022b14a6af"} 2025-11-19T18:58:05.801Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "3c33dde2-5014-4e75-a92b-cbb884cf164b"} 2025-11-19T18:58:10.364Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "secrets": "my-cluster-secrets-2"} 2025-11-19T18:58:10.364Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "user": "root"} 2025-11-19T18:58:10.379Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "user": "root"} 2025-11-19T18:58:10.404Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "secret": "some-name-mysql-init", "user": "root"} 2025-11-19T18:58:11.531Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "55024a82-4a7a-4ba4-8950-1637382d938e"} 2025-11-19T18:58:12.410Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26"} 2025-11-19T18:58:12.444Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "user": "root"} 2025-11-19T18:58:12.444Z INFO Password changed, updating user 
{"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "user": "operator"} 2025-11-19T18:58:12.454Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "user": "operator"} 2025-11-19T18:58:12.477Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "secret": "some-name-mysql-init", "user": "operator"} 2025-11-19T18:58:12.502Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "user": "operator"} 2025-11-19T18:58:12.502Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "user": "monitor"} 2025-11-19T18:58:12.514Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "user": "monitor"} 2025-11-19T18:58:12.535Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-19T18:58:12.553Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "user": "monitor"} 2025-11-19T18:58:12.588Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "user": "monitor"} 2025-11-19T18:58:12.588Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "user": "xtrabackup"} 2025-11-19T18:58:12.618Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "user": "xtrabackup"} 2025-11-19T18:58:12.654Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-11-19T18:58:12.678Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "user": "xtrabackup"} 2025-11-19T18:58:12.678Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "user": "replication"} 2025-11-19T18:58:12.690Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "user": "replication"} 2025-11-19T18:58:12.707Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "secret": "some-name-mysql-init", "user": "replication"} 2025-11-19T18:58:12.730Z INFO Internal secrets updated {"controller": 
"pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "user": "replication"} 2025-11-19T18:58:12.730Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "user": "proxyadmin"} 2025-11-19T18:58:12.748Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "user": "proxyadmin"} 2025-11-19T18:58:12.769Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "user": "proxyadmin"} 2025-11-19T18:58:12.769Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "last-applied-secret": "bcb48313682c8f42db8567efedb1f434d43c0dafec63d1886952293b0cfc7080"} 2025-11-19T18:58:12.769Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "last-applied-secret": "bcb48313682c8f42db8567efedb1f434d43c0dafec63d1886952293b0cfc7080"} 2025-11-19T18:58:12.774Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T18:58:12.832Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T18:58:14.822Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bc434e71-68f4-4d36-8191-fa1062919c26", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-19T19:00:04.311Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "17e018b4-7cc1-4214-9e8d-517f1549b72e", "primary name": "some-name-pxc-0.some-name-pxc.users-1597.svc.cluster.local"} 2025-11-19T19:00:14.699Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "36ba5292-ff80-4546-8834-a7c8668857c5", "primary name": "some-name-pxc-0.some-name-pxc.users-1597.svc.cluster.local"} 2025-11-19T19:00:19.857Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "8539de2d-427a-4c3a-84e9-97bbb59b1b06", "primary name": "some-name-pxc-0.some-name-pxc.users-1597.svc.cluster.local"} 2025-11-19T19:00:30.119Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "76ab03a3-2e64-4991-ba51-a30908248209", "primary name": "some-name-pxc-0.some-name-pxc.users-1597.svc.cluster.local"} 2025-11-19T19:00:40.390Z INFO Unable to find primary pod for replication. 
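The repeated "sync users" failure above is the proxysql-admin tooling inside the ProxySQL pod being refused on the admin interface (port 6032) while the proxyadmin password rotation is still rolling out; the controller then also fails to resolve a primary for a while. A minimal manual version of the same connectivity check, as a sketch only: the namespace, internal-some-name secret, and pod/container names are the ones this particular log uses, and it assumes the mysql client is available in the proxysql image, as the syncusers output suggests.

  # Pull the proxyadmin password the operator currently has on record.
  PROXY_ADMIN_PASSWORD=$(kubectl -n users-1597 get secret internal-some-name \
    -o jsonpath='{.data.proxyadmin}' | base64 -d)
  # Attempt the same admin-port login that syncusers performs.
  kubectl -n users-1597 exec some-name-proxysql-0 -c proxysql -- \
    mysql -h127.0.0.1 -P6032 -uproxyadmin -p"$PROXY_ADMIN_PASSWORD" -e 'SELECT 1;'

If this login succeeds while syncusers still fails, the ProxySQL pods are most likely still running with the pre-rotation password and are waiting on the restart that the operator has just scheduled.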
2025-11-19T19:00:48.145Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "26b776dc-a6b4-496b-a50a-3334bb4a5457"}
2025-11-19T19:00:53.077Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "4c463985-3579-4514-ba3b-43143976f508"}
2025-11-19T19:00:53.685Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "d677ef74-20d5-42c1-92bc-4006e70b35f1", "user": "operator"}
2025-11-19T19:00:53.697Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "d677ef74-20d5-42c1-92bc-4006e70b35f1", "user": "operator"}
2025-11-19T19:00:53.719Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "d677ef74-20d5-42c1-92bc-4006e70b35f1", "secret": "some-name-mysql-init", "user": "operator"}
2025-11-19T19:00:53.743Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "d677ef74-20d5-42c1-92bc-4006e70b35f1", "user": "operator"}
2025-11-19T19:00:53.743Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "d677ef74-20d5-42c1-92bc-4006e70b35f1", "last-applied-secret": "fcc27471c2e20405fcf3266c90d1fd5bcc356dabca1aceb35e3a3234742eab4f"}
2025-11-19T19:00:53.747Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "d677ef74-20d5-42c1-92bc-4006e70b35f1", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-19T19:00:57.465Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "d677ef74-20d5-42c1-92bc-4006e70b35f1", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-19T19:01:27.130Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "ed286075-f52d-4104-af24-c5d2a0518550"}
2025-11-19T19:01:31.549Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "60f3dc8c-94a0-4852-a8ac-eba012b01983"}
2025-11-19T19:01:36.735Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "23807038-857d-4b0e-aa59-7c17e8e16955"}
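This time the 1045 errors are for the operator account: syncusers reaches ProxySQL but cannot log in to the PXC nodes from some-name-proxysql-0 while the operator password is mid-rotation, so no primary cluster node can be found for the sync. A direct way to confirm which operator credential a PXC node currently accepts, as a sketch under the same naming assumptions as above:

  OPERATOR_PASSWORD=$(kubectl -n users-1597 get secret internal-some-name \
    -o jsonpath='{.data.operator}' | base64 -d)
  kubectl -n users-1597 exec some-name-pxc-0 -c pxc -- \
    mysql -uoperator -p"$OPERATOR_PASSWORD" -e 'SELECT CURRENT_USER();'

Once the new password has propagated, the "PXC users synced with ProxySQL" entries that follow confirm recovery.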
"reconcileID": "23807038-857d-4b0e-aa59-7c17e8e16955"} 2025-11-19T19:01:42.013Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "269ecf2c-ffc7-4248-a9c7-24e0cdd15ce4"} 2025-11-19T19:01:47.235Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "a5c087a9-6ca1-468d-8911-a35779f9356b"} 2025-11-19T19:01:52.152Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "c2de8213-5d5e-4391-9c16-17cde8adb445"} 2025-11-19T19:01:57.519Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "47b12676-bab9-486a-b871-dee6f38095d4"} 2025-11-19T19:02:03.463Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "90660713-63d1-4c92-8aa7-0aeefeffe98f"} 2025-11-19T19:02:08.106Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "808efa8f-5758-49c9-ad94-4822ad606e8f"} 2025-11-19T19:02:13.431Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "3b489767-42af-4f6d-b0a4-febec00e9096"} 2025-11-19T19:02:19.607Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "3b058cd5-62d0-4576-9acf-06bd6be79ff3"} 2025-11-19T19:02:24.813Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "1e410453-91e9-4beb-a1cc-9757817f83d9"} 2025-11-19T19:02:30.135Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "a19573e8-49ee-4952-803c-dd860b04584a"} 2025-11-19T19:02:35.327Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "0f8ea868-470e-40cd-84dd-bc0475546562"} 2025-11-19T19:02:40.437Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "1e31b68f-8d76-4f1c-87cb-daabfcd5d81f"} 2025-11-19T19:02:44.501Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "afab9c95-3cb3-46be-bd33-d26894bb5b99", "user": "root"} 2025-11-19T19:02:44.517Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "afab9c95-3cb3-46be-bd33-d26894bb5b99", "user": "root"} 2025-11-19T19:02:44.542Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "afab9c95-3cb3-46be-bd33-d26894bb5b99", "secret": "some-name-mysql-init", "user": "root"} 2025-11-19T19:02:47.183Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "6c3d1370-e238-42c4-9ac6-d705831e6291"} 2025-11-19T19:02:47.733Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "afab9c95-3cb3-46be-bd33-d26894bb5b99"} 2025-11-19T19:02:47.758Z INFO Internal secrets updated 
{"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "afab9c95-3cb3-46be-bd33-d26894bb5b99", "user": "root"} 2025-11-19T19:02:47.758Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "afab9c95-3cb3-46be-bd33-d26894bb5b99", "user": "monitor"} 2025-11-19T19:02:47.769Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "afab9c95-3cb3-46be-bd33-d26894bb5b99", "user": "monitor"} 2025-11-19T19:02:47.793Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "afab9c95-3cb3-46be-bd33-d26894bb5b99", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-19T19:02:47.811Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "afab9c95-3cb3-46be-bd33-d26894bb5b99", "user": "monitor"} 2025-11-19T19:02:47.840Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "afab9c95-3cb3-46be-bd33-d26894bb5b99", "user": "monitor"} 2025-11-19T19:02:47.840Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "afab9c95-3cb3-46be-bd33-d26894bb5b99", "user": "xtrabackup"} 2025-11-19T19:02:47.851Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "afab9c95-3cb3-46be-bd33-d26894bb5b99", "user": "xtrabackup"} 2025-11-19T19:02:47.874Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "afab9c95-3cb3-46be-bd33-d26894bb5b99", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-11-19T19:02:47.901Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "afab9c95-3cb3-46be-bd33-d26894bb5b99", "user": "xtrabackup"} 2025-11-19T19:02:47.901Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "afab9c95-3cb3-46be-bd33-d26894bb5b99", "user": "proxyadmin"} 2025-11-19T19:02:47.919Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "afab9c95-3cb3-46be-bd33-d26894bb5b99", "user": "proxyadmin"} 2025-11-19T19:02:47.943Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "afab9c95-3cb3-46be-bd33-d26894bb5b99", "user": "proxyadmin"} 2025-11-19T19:02:47.943Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "afab9c95-3cb3-46be-bd33-d26894bb5b99", "last-applied-secret": "d25a44b5f0bf58eee7b9a5fe441743aa57762d5845b9a8e740b818096dbbdcb7"} 2025-11-19T19:02:47.943Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "afab9c95-3cb3-46be-bd33-d26894bb5b99", "last-applied-secret": "d25a44b5f0bf58eee7b9a5fe441743aa57762d5845b9a8e740b818096dbbdcb7"} 2025-11-19T19:02:47.946Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "afab9c95-3cb3-46be-bd33-d26894bb5b99", "object": "some-name-pxc", "kind": 
"&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T19:02:48.030Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "afab9c95-3cb3-46be-bd33-d26894bb5b99", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T19:02:50.224Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "afab9c95-3cb3-46be-bd33-d26894bb5b99", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-19T19:04:23.541Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "9eb104d7-5e5d-4667-8aa6-5bbb4be6a036", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-1597 on 34.118.224.10:53: no such host"} 2025-11-19T19:04:24.349Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "7bcd91c8-bb09-45e6-902e-603cffb2ca68", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-1597 on 34.118.224.10:53: no such host"} 2025-11-19T19:04:28.942Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "46b360bf-c7a0-401c-8cb8-28bc7b4b5bd0", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-1597 on 34.118.224.10:53: no such host"} 2025-11-19T19:04:39.398Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "c5e4282b-f109-499e-ad73-c0d33fba5b29", "primary name": "some-name-pxc-0.some-name-pxc.users-1597.svc.cluster.local"} 2025-11-19T19:04:54.959Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "85e0a800-493f-4b3b-a9c8-2c8ee3441dd1", "primary name": "some-name-pxc-0.some-name-pxc.users-1597.svc.cluster.local"} 2025-11-19T19:05:05.242Z INFO Unable to find primary pod for replication. 
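The "no such host" errors are expected while some-name-pxc-0 is being recreated: the per-pod DNS record under the headless service only resolves once the pod exists and has an address published for it. A throwaway resolver probe, as a sketch assuming a busybox image can be pulled in this cluster:

  kubectl -n users-1597 run dns-probe --rm -it --restart=Never --image=busybox -- \
    nslookup some-name-pxc-0.some-name-pxc.users-1597.svc.cluster.local

When the record resolves again, the controller's primary lookup recovers on its own, as the entries that follow show.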
2025-11-19T19:05:22.390Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "user": "root"}
2025-11-19T19:05:22.404Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "user": "root"}
2025-11-19T19:05:22.424Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "secret": "some-name-mysql-init", "user": "root"}
2025-11-19T19:05:22.445Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "user": "root"}
2025-11-19T19:05:22.445Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "user": "operator"}
2025-11-19T19:05:22.456Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "user": "operator"}
2025-11-19T19:05:22.477Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "secret": "some-name-mysql-init", "user": "operator"}
2025-11-19T19:05:22.501Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "user": "operator"}
2025-11-19T19:05:22.501Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "user": "monitor"}
2025-11-19T19:05:22.511Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "user": "monitor"}
2025-11-19T19:05:22.536Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "secret": "some-name-mysql-init", "user": "monitor"}
2025-11-19T19:05:22.559Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "user": "monitor"}
2025-11-19T19:05:22.559Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "user": "xtrabackup"}
2025-11-19T19:05:22.571Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "user": "xtrabackup"}
2025-11-19T19:05:22.591Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2025-11-19T19:05:22.610Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "user": "xtrabackup"}
2025-11-19T19:05:22.610Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "user": "replication"}
2025-11-19T19:05:22.620Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "user": "replication"}
2025-11-19T19:05:22.638Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "secret": "some-name-mysql-init", "user": "replication"}
2025-11-19T19:05:22.662Z INFO HAProxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "last-applied-secret": "ec6dc09d2d0b3a9ce7a92fbaf49537c8f6c3d9f6dc8ddba73e95751e05a6e19f"}
2025-11-19T19:05:22.662Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "user": "replication"}
2025-11-19T19:05:22.662Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "last-applied-secret": "ec6dc09d2d0b3a9ce7a92fbaf49537c8f6c3d9f6dc8ddba73e95751e05a6e19f"}
2025-11-19T19:05:22.663Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-19T19:05:22.724Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"}
2025-11-19T19:05:22.774Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-19T19:05:22.885Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-19T19:05:22.990Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
2025-11-19T19:05:23.197Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "object": "some-name-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
2025-11-19T19:05:24.496Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "40c7e2ce-9840-452f-8b22-6feafd613afd", "error": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-1\" not found / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-1\" not found / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-1597.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-19T19:05:26.814Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "fae845c3-bf6c-46ce-9fd6-57837e7d61a1", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.227.231:3306: connect: connection refused"}
2025-11-19T19:05:27.527Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "dd8335b3-f2fe-4d52-8d77-cf3d0cb395d5", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"}
2025-11-19T19:07:47.978Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "f39f8f4a-73ea-45b8-bbee-bf11c378e9b5", "user": "monitor"}
2025-11-19T19:07:47.991Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "f39f8f4a-73ea-45b8-bbee-bf11c378e9b5", "user": "monitor"}
2025-11-19T19:07:48.014Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "f39f8f4a-73ea-45b8-bbee-bf11c378e9b5", "secret": "some-name-mysql-init", "user": "monitor"}
2025-11-19T19:07:48.037Z INFO HAProxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "f39f8f4a-73ea-45b8-bbee-bf11c378e9b5", "last-applied-secret": "162919c08d7d1c7642a69fe3171501c8f0db8e2984e705edffec30f2086bdbe1"}
2025-11-19T19:07:48.037Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "f39f8f4a-73ea-45b8-bbee-bf11c378e9b5", "user": "monitor"}
2025-11-19T19:07:48.039Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "f39f8f4a-73ea-45b8-bbee-bf11c378e9b5", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
"metaChanged": true} 2025-11-19T19:07:55.742Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-1597", "name": "some-name", "reconcileID": "bb333b2e-db78-4594-b611-355d729d84a0", "err": "get primary pxc pod: failed to get proxy connection: invalid connection"} ... // 22 identical fields - "25a44b5f0bf58eee7b9a5fe441743aa57762d5845b9a8e740b818096dbbdcb7", ... // 2 identical fields ... // 2 identical fields ... // 2 identical fields ... // 2 identical fields ... // 3 identical elements ... // 3 identical fields ... // 3 identical fields ... // 3 identical fields ... // 4 identical fields ... // 5 identical fields ... // 5 identical fields ... // 5 identical fields ... // 6 identical fields ... // 6 identical fields - "7b7e2460e56b11b8bb02ff2825972bd41d53c3b4d7c9bb4752c525e391ad6608", ... // 7 identical fields ... // 8 identical fields ... // 9 identical fields ... // 9 identical fields - "a92d9b286dcedda4232265349bcba7b615814e21f7b8a378645e5b42a9b22c11", + "a92d9b286dcedda4232265349bcba7b615814e21f7b8a378645e5b42a9b22c11", AccessModes: nil, ActiveDeadlineSeconds: nil, Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "haproxy", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}}, Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}}, Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}}, Annotations: map[string]string{ - Annotations: map[string]string{ + Annotations: map[string]string{ + APIVersion: "", - APIVersion: "apps/v1", - APIVersion: "apps/v1", - APIVersion: "v1", Args: {"haproxy"}, Args: {"mysqld"}, Args: {"proxysql", "-f", "-c", "/etc/proxysql/proxysql.cnf", ...}, - Args: []string{"logrotate"}, AutomountServiceAccountToken: nil, + AvailableReplicas: 0, - AvailableReplicas: 2, - AvailableReplicas: 3, AWSElasticBlockStore: nil, AzureFile: nil, - "bcb48313682c8f42db8567efedb1f434d43c0dafec63d1886952293b0cfc7080", + "bcb48313682c8f42db8567efedb1f434d43c0dafec63d1886952293b0cfc7080", + "c09d2d0b3a9ce7a92fbaf49537c8f6c3d9f6dc8ddba73e95751e05a6e19f", Capacity: nil, - CollisionCount: &0, + CollisionCount: nil, Conditions: nil, ConfigMapKeyRef: nil, ConfigMap: &v1.ConfigMapVolumeSource{ ContainerPort: 3306, ContainerPort: 33060, ContainerPort: 33062, ContainerPort: 3307, ContainerPort: 3309, ContainerPort: 4444, ContainerPort: 4567, ContainerPort: 4568, ContainerPort: 6032, ContainerPort: 6070, ContainerPort: 8404, Containers: []v1.Container{ + CreationTimestamp: v1.Time{}, - CreationTimestamp: v1.Time{Time: s"2025-11-19 18:46:07 +0000 UTC"}, - CreationTimestamp: v1.Time{Time: s"2025-11-19 19:05:22 +0000 UTC"}, + CurrentReplicas: 0, - CurrentReplicas: 2, - CurrentReplicas: 3, + CurrentRevision: "", - 
CurrentRevision: "some-name-haproxy-6f4b8688b4", - CurrentRevision: "some-name-proxysql-5d64bd669b", - CurrentRevision: "some-name-proxysql-5d77458485", - CurrentRevision: "some-name-proxysql-69fd7477b7", - CurrentRevision: "some-name-proxysql-797987f65d", - CurrentRevision: "some-name-proxysql-95fcf5777", - CurrentRevision: "some-name-proxysql-f95ffff75", - CurrentRevision: "some-name-pxc-7cf8467469", - CurrentRevision: "some-name-pxc-7d56b668c6", - CurrentRevision: "some-name-pxc-84dbf4f64b", - CurrentRevision: "some-name-pxc-86d4d79487", "d", + "d25a44b5f0bf58eee7b9a5fe441743aa57762d5845b9a8e740b818096dbbdcb7", DataSource: nil, DataSourceRef: nil, - DefaultMode: &420, - DefaultMode: &420, + DefaultMode: nil, + DefaultMode: nil, DeletionGracePeriodSeconds: nil, DeletionGracePeriodSeconds: nil, DeletionTimestamp: nil, + DeprecatedServiceAccount: "", - DeprecatedServiceAccount: "default", + DNSPolicy: "", - DNSPolicy: "ClusterFirst", + "ec6", - "ec6dc09d2d0b3a9ce7a92fbaf49537c8f6c3d9f6dc8ddba73e95751e05a6e19f", + "ec6dc09d2d0b3a9ce7a92fbaf49537c8f6c3d9f6dc8ddba73e95751e05a6e19f", EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-haproxy"}, Optional: &true}}}, EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-proxysql"}, Optional: &true}}}, EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-pxc"}, Optional: &true}}}, EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-log-collector"}, Optional: &true}}}, - EnvFrom: []v1.EnvFromSource{{SecretRef: s"&SecretEnvSource{LocalObjectRefe"...}}, Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "LIVENESS_CHECK_TIMEOUT", Value: "5"}, {Name: "READINESS_CHECK_TIMEOUT", Value: "1"}}, Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "OPERATOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "proxyadmin"}}}, ...}, Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, ...}, Env: []v1.EnvVar{ - Env: []v1.EnvVar{ EphemeralContainers: nil, FailureThreshold: 3, - "fcc27471c2e20405fcf3266c90d1fd5bcc356dabca1aceb35e3a3234742eab4f", + "fcc27471c2e20405fcf3266c90d1fd5bcc356dabca1aceb35e3a3234742eab4f", FC: nil, FieldPath: "metadata.name", FieldPath: "metadata.namespace", FieldRef: &v1.ObjectFieldSelector{ - FieldsType: "FieldsV1", - FieldsType: "FieldsV1", - FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`..., - FieldsV1: s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`..., Finalizers: nil, Finalizers: nil, + Generation: 0, - Generation: 1, - Generation: 2, - Generation: 3, - Generation: 4, - Generation: 5, - Generation: 6, - Generation: 7, - Generation: 8, github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 GitRepo: nil, /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421 
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:474 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:869 HostAliases: nil, HostAliases: nil, HostIP: "", HostIPC: false, Hostname: "", HostPort: 0, - Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", - Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", ImagePullPolicy: "Always", - ImagePullPolicy: "Always", ImagePullSecrets: nil, InitContainers: []v1.Container{ InitialDelaySeconds: 300, ISCSI: nil, Items: nil, Items: nil, "kubectl.kubernetes.io/default-container": "haproxy", "kubectl.kubernetes.io/default-container": "proxysql", "kubectl.kubernetes.io/default-container": "pxc", Labels: {"app.kubernetes.io/component": "haproxy", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Labels: nil, + "last-applied-secret": "03d7d8938513267f1e2c65bb99e743300097e787ca75736b671cb9ef2c75be1a", + "last-applied-secret": "7b7e2460e56b11b8bb02ff2825972bd41d53c3b4d7c9bb4752c525e391ad6608", + "last-applied-secret": "ec6dc09d2d0b3a9ce7a92fbaf49537c8f6c3d9f6dc8ddba73e95751e05a6e19f", "last-applied-secret": strings.Join({ Lifecycle: nil, LivenessProbe: &v1.Probe{ LocalObjectReference: {Name: "auto-some-name-pxc"}, LocalObjectReference: {Name: "some-name-haproxy"}, LocalObjectReference: {Name: "some-name-pxc"}, ManagedFields: nil, + ManagedFields: nil, - ManagedFields: []v1.ManagedFieldsEntry{ - Manager: "kube-controller-manager", - Manager: "percona-xtradb-cluster-operator", MinReadySeconds: 0, [mysql] 2025/11/19 18:52:57 packets.go:58 read tcp 10.198.249.50:49724->10.198.248.40:6032: read: connection reset by peer [mysql] 2025/11/19 19:07:19 packets.go:58 read tcp 10.198.249.50:45370->34.118.227.231:3306: i/o timeout [mysql] 2025/11/19 19:07:52 packets.go:58 unexpected EOF [mysql] 2025/11/19 19:07:53 packets.go:58 unexpected EOF [mysql] 2025/11/19 19:07:54 packets.go:58 unexpected EOF [mysql] 2025/11/19 19:07:55 packets.go:58 unexpected EOF [mysql] 2025/11/19 19:08:00 packets.go:58 unexpected EOF [mysql] 2025/11/19 19:08:01 packets.go:58 unexpected EOF Name: "auto-config", {Name: "bin", VolumeSource: {EmptyDir: &{}}}, {Name: "CLUSTER_HASH", Value: "2626306"}, Name: "config", Name: "DEFAULT_AUTHENTICATION_PLUGIN", {Name: "haproxy-auto", VolumeSource: {EmptyDir: &{}}}, Name: "haproxy-custom", - {Name: "IS_LOGCOLLECTOR", Value: "yes"}, Name: "ist", {Name: "LIVENESS_CHECK_TIMEOUT", Value: "5"}, {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"}, - {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"}, - Name: "logrotate", - Name: "logs", {Name: "MONITOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "monitor"}}}, - {Name: "MONITOR_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, Name: "mysql", Name: "mysql-admin", Name: "mysql-init-file", {Name: "MYSQL_NOTIFY_SOCKET", Value: "/var/lib/mysql/notify.sock"}, Name: "mysql-replicas", {Name: 
"MYSQL_STATE_FILE", Value: "/var/lib/mysql/mysql.state"}, Name: "mysql-users-secret-file", Name: "mysqlx", {Name: "OPERATOR_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, Name: "POD_NAME", Name: "POD_NAMESPASE", - {Name: "POD_NAMESPASE", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, - {Name: "POD_NAME", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, Name: "proxyadm", Name: "proxy-protocol", {Name: "READINESS_CHECK_TIMEOUT", Value: "15"}, - {Name: "SERVICE_TYPE", Value: "mysql"}, Name: "some-name-env-vars-haproxy", Namespace: "users-1597", Name: "ssl", Name: "ssl-internal", Name: "sst", Name: "stats", {Name: "tmp", VolumeSource: {EmptyDir: &{}}}, Name: "vault-keyring-secret", Name: "write-set", {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, NFS: nil, NodeName: "", NodeSelector: nil, ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "last-applied-secret": "03d7d8938513267f1e2c65bb99e743300097e787ca75736b671cb9ef2c75be1a", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}}, ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}}, ObjectMeta: {Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "pxc", "last-applied-secret": "d25a44b5f0bf58eee7b9a5fe441743aa57762d5845b9a8e740b818096dbbdcb7", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}}, ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, ObjectMeta: {Name: "proxydata", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, ObjectMeta: v1.ObjectMeta{ ObjectMeta: v1.ObjectMeta{ + ObservedGeneration: 0, - ObservedGeneration: 1, - ObservedGeneration: 2, - ObservedGeneration: 3, - ObservedGeneration: 4, - ObservedGeneration: 5, - ObservedGeneration: 6, - ObservedGeneration: 7, - ObservedGeneration: 8, - Operation: "Update", - Operation: "Update", Optional: &false, Optional: &true, Optional: &true, Ordinals: nil, OS: nil, Overhead: nil, OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name", UID: 
"3862cdec-0c92-4f4e-9608-1d0e2f2044e9", ...}}, OwnerReferences: nil, "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMDNkN2Q4OTM4NTEzMjY3ZjFlMmM2NWJiOTllNzQzMzAwMDk3ZTc4N2NhNzU3MzZiNjcxY2I5ZWYyYzc1YmUxYSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYmNiNDgzMTM2ODJjOGY0MmRiODU2N2VmZWRiMWY0MzRkNDNjMGRhZmVjNjNkMTg4Njk1MjI5M2IwY2ZjNzA4MCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYmNiNDgzMTM2ODJjOGY0MmRiODU2N2VmZWRiMWY0MzRkNDNjMGRhZmVjNjNkMTg4Njk1MjI5M2IwY2ZjNzA4MCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYTkyZDliMjg2ZGNlZGRhNDIzMjI2NTM0OWJjYmE3YjYxNTgxNGUyMWY3YjhhMzc4NjQ1ZTViNDJhOWIyMmMxMSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYTkyZDliMjg2ZGNlZGRhNDIzMjI2NTM0OWJjYmE3YjYxNTgxNGUyMWY3YjhhMzc4NjQ1ZTViNDJhOWIyMmMxMSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZDI1YTQ0YjVmMGJmNThlZWU3YjlhNWZlNDQxNzQzYWE1Nzc2MmQ1ODQ1YjlhOGU3NDBiODE4MDk2ZGJiZGNiNyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZmNjMjc0NzFjMmUyMDQwNWZjZjMyNjZjOTBkMWZkNWJjYzM1NmRhYmNhMWFjZWIzNWUzYTMyMzQ3NDJlYWI0ZiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZmNjMjc0NzFjMmUyMDQwNWZjZjMyNjZjOTBkMWZkNWJjYzM1NmRhYmNhMWFjZWIzNWUzYTMyMzQ3NDJlYWI0ZiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZWM2ZGMwOWQyZDBiM2E5Y2U3YTkyZmJhZjQ5NTM3YzhmNmMzZDlmNmRjOGRkYmE3M2U5NTc1MWUwNWE2ZTE5ZiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZWM2ZGMwOWQyZDBiM2E5Y2U3YTkyZmJhZjQ5NTM3YzhmNmMzZDlmNmRjOGRkYmE3M2U5NTc1MWUwNWE2ZTE5ZiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6ImhhcHJveHkiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMTYyOTE5YzA4ZDdkMWM3NjQyYTY5ZmUzMTcxNTAxYzhmMGRiOGUyOTg0ZTcwNWVkZmZlYzMwZjIwODZiZGJlMSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6ImhhcHJveHkiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZWM2ZGMwOWQyZDBiM2E5Y2U3YTkyZmJhZjQ5NTM3YzhmNmMzZDlmNmRjOGRkYmE3M2U5NTc1MWUwNWE2ZTE5ZiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6ImhhcHJveHkiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZWM2ZGMwOWQyZDBiM2E5Y2U3YTkyZmJhZjQ5NTM3YzhmNmMzZDlmNmRjOGRkYmE3M2U5NTc1MWUwNWE2ZTE5ZiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6ImhhcHJveHkiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSJ9fSwic3BlYyI6eyJ2b2x1bWVzIjpbeyJuYW1lIjoiaGFwcm94eS1jdXN0b20iLCJjb25maWdNYXAi"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiN2I3ZTI0NjBlNTZiMTFiOGJiMDJmZjI4MjU5NzJiZDQxZDUzYzNiNGQ3YzliYjQ3NTJjNTI1ZTM5MWFkNjYwOCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiN2I3ZTI0NjBlNTZiMTFiOGJiMDJmZjI4MjU5NzJiZDQxZDUzYzNiNGQ3YzliYjQ3NTJjNTI1ZTM5MWFkNjYwOCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYmNiNDgzMTM2ODJjOGY0MmRiODU2N2VmZWRiMWY0MzRkNDNjMGRhZmVjNjNkMTg4Njk1MjI5M2IwY2ZjNzA4MCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYmNiNDgzMTM2ODJjOGY0MmRiODU2N2VmZWRiMWY0MzRkNDNjMGRhZmVjNjNkMTg4Njk1MjI5M2IwY2ZjNzA4MCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZDI1YTQ0YjVmMGJmNThlZWU3YjlhNWZlNDQxNzQzYWE1Nzc2MmQ1ODQ1YjlhOGU3NDBiODE4MDk2ZGJiZGNiNyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZDI1YTQ0YjVmMGJmNThlZWU3YjlhNWZlNDQxNzQzYWE1Nzc2MmQ1ODQ1YjlhOGU3NDBiODE4MDk2ZGJiZGNiNyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZDI1YTQ0YjVmMGJmNThlZWU3YjlhNWZlNDQxNzQzYWE1Nzc2MmQ1ODQ1YjlhOGU3NDBiODE4MDk2ZGJiZGNiNyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMTU0LTdhNjIzYjEwIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJsb2dzIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1sb2djb2xsZWN0b3IiLCJlbnZGcm9tIjpbeyJzZWNyZXRSZWYiOnsibmFtZSI6InNvbWUtbmFtZS1sb2ctY29sbGVjdG9yIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IkxPR19EQVRBX0RJUiIsInZhbHVlIjoiL3Zhci9saWIvbXlzcWwifSx7Im5hbWUiOiJQT0RfTkFNRVNQQVNFIiwidmFsdWVGcm9tIjp7ImZpZWxkUmVmIjp7ImZpZWxkUGF0aCI6Im1ldGFkYXRhLm5hbWVzcGFjZSJ9fX0seyJuYW1lIjoiUE9EX05BTUUiLCJ2YWx1ZUZyb20iOnsiZmllbGRSZWYiOnsiZmllbGRQYXRoIjoibWV0YWRhdGEubmFtZSJ9fX1dLCJyZXNvdXJjZXMiOnt9LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJkYXRhZGlyIiwibW91bnRQYXRoIjoiL3Zhci9saWIvbXlzcWwifV0sImltYWdlUHVsbFBvbGljeSI6IkFsd2F5cyJ9LHsibmFtZSI6ImxvZ3JvdGF0ZSIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOm1haW4tbG9nY29sbGVjdG9yIiwiYXJncyI6WyJsb2dyb3RhdGUiXSwiZW52IjpbeyJuYW1lIjoiU0VSVklDRV9UWVBFIiwidmFsdWUiOiJteXNxbCJ9LHsibmFtZSI6Ik1PTklUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJtb25pdG9yIn19fV0sInJlc291cmNlcyI6e30sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImRhdGFkaXIiLCJtb3VudF
BhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHhjIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1weGM1LjciLCJjb21tYW5kIjpbIi92YXIvbGliL215c3FsL3B4Yy1lbnRyeXBvaW50LnNoIl0sImFyZ3MiOlsibXlzcWxkIl0sInBvcnRzIjpbeyJuYW1lIjoibXlzcWwiLCJjb250YWluZXJQb3J0IjozMzA2fSx7Im5hbWUiOiJzc3QiLCJjb250YWluZXJQb3J0Ijo0NDQ0fSx7Im5hbWUiOiJ3cml0ZS1zZXQiLCJjb250YWluZXJQb3J0Ijo0NTY3fSx7Im5hbWUiOiJpc3QiLCJjb250YWluZXJQb3J0Ijo0NTY4fSx7Im5hbWUiOiJteXNxbC1hZG1pbiIsImNvbnRhaW5lclBvcnQiOjMzMDYyfSx7Im5h"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZDI1YTQ0YjVmMGJmNThlZWU3YjlhNWZlNDQxNzQzYWE1Nzc2MmQ1ODQ1YjlhOGU3NDBiODE4MDk2ZGJiZGNiNyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMTU0LTdhNjIzYjEwIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXB4YzUuNyIsImNvbW1hbmQiOlsiL3Zhci9saWIvbXlzcWwvcHhjLWVudHJ5cG9pbnQuc2giXSwiYXJncyI6WyJteXNxbGQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InNzdCIsImNvbnRhaW5lclBvcnQiOjQ0NDR9LHsibmFtZSI6IndyaXRlLXNldCIsImNvbnRhaW5lclBvcnQiOjQ1Njd9LHsibmFtZSI6ImlzdCIsImNvbnRhaW5lclBvcnQiOjQ1Njh9LHsibmFtZSI6Im15c3FsLWFkbWluIiwiY29udGFpbmVyUG9ydCI6MzMwNjJ9LHsibmFtZSI6Im15c3FseCIsImNv
bnRhaW5lclBvcnQiOjMzMDYwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXB4YyIsIm9wdGlvbmFsIjp0cnVlfX1dLCJlbnYiOlt7Im5hbWUiOiJQWENfU0VSVklDRSIsInZhbHVlIjoic29tZS1uYW1lLXB4Yy11bnJlYWR5In0seyJuYW1lIjoiTU9OSVRPUl9IT1NUIiwidmFsdWUiOiIlIn0seyJuYW1lIjoiTVlTUUxfUk9PVF9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6InJvb3QifX19LHsibmFtZSI6IlhUUkFCQUNLVVBfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJ4dHJhYmFja3VwIn19fSx7Im5hbWUiOiJNT05JVE9SX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoibW9uaXRvciJ9fX0seyJuYW1lIjoiQ0xVU1RFUl9IQVNIIiwidmFsdWUiOiIyNjI2MzA2In0seyJuYW1lIjoiT1BFUkFUT1JfQURNSU5fUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiTElWRU5FU1NfQ0hFQ0tfVElNRU9VVCIsInZhbHVlIjoiNSJ9LHsibmFtZSI6IlJFQURJTkVTU19DSEVDS19USU1FT1VUIiwidmFsdWUiOiIxNSJ9LHsibmFtZSI6IkRFRkFVTFRfQVVUSEVOVElDQVRJT05fUExVR0lOIiwidmFsdWUiOiJjYWNoaW5nX3NoYTJfcGFzc3dvcmQifSx7Im5hbWUiOiJNWVNR"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZWM2ZGMwOWQyZDBiM2E5Y2U3YTkyZmJhZjQ5NTM3YzhmNmMzZDlmNmRjOGRkYmE3M2U5NTc1MWUwNWE2ZTE5ZiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNv"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMDNkN2Q4OTM4NTEzMjY3ZjFlMmM2NWJiOTllNzQzMzAwMDk3ZTc4N2NhNzU3MzZiNjcxY2I5ZWYyYzc1YmUxYSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMw"..., "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc", + PeriodSeconds: 0, - PeriodSeconds: 10, + PersistentVolumeClaimRetentionPolicy: nil, - PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", + Phase: "", - Phase: "Pending", + PodManagementPolicy: "", - PodManagementPolicy: "OrderedReady", Ports: nil, Ports: []v1.ContainerPort{ PreemptionPolicy: nil, ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}}, + Protocol: "", - Protocol: "TCP", Quobyte: nil, ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, 
InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...},
+ ReadyReplicas: 0,
- ReadyReplicas: 2,
- ReadyReplicas: 3,
+ Replicas: 0,
Replicas: &2,
- Replicas: 2,
- Replicas: &2,
+ Replicas: &2,
Replicas: &3,
- Replicas: 3,
- Replicas: &3,
+ Replicas: &3,
ResizePolicy: nil,
ResourceFieldRef: nil,
Resources: {},
Resources: {Limits: {s"cpu": {i: {...}, s: "1", Format: "DecimalSI"}, s"memory": {i: {...}, s: "2G", Format: "DecimalSI"}}, Requests: {s"cpu": {i: {...}, s: "100m", Format: "DecimalSI"}, s"memory": {i: {...}, Format: "DecimalSI"}}},
+ ResourceVersion: "",
- ResourceVersion: "1763578004802703023",
- ResourceVersion: "1763578181162527004",
- ResourceVersion: "1763578315977999023",
- ResourceVersion: "1763578368298895023",
- ResourceVersion: "1763578378122735023",
- ResourceVersion: "1763578534601727004",
- ResourceVersion: "1763578600761695023",
- ResourceVersion: "1763578649860687023",
- ResourceVersion: "1763578706967439023",
- ResourceVersion: "1763578842204335004",
- ResourceVersion: "1763578875250031023",
- ResourceVersion: "1763579117743951004",
- ResourceVersion: "1763579122715119004",
- ResourceVersion: "1763579122763935023",
- ResourceVersion: "1763579216265007023",
+ RestartPolicy: "",
- RestartPolicy: "Always",
- RevisionHistoryLimit: &10,
+ RevisionHistoryLimit: nil,
+ SchedulerName: "",
+ SchedulerName: "",
- SchedulerName: "default-scheduler",
- SchedulerName: "default-scheduler",
SecretName: "internal-some-name",
SecretName: "some-name-env-vars-haproxy",
SecretName: "some-name-mysql-init",
SecretName: "some-name-ssl",
SecretName: "some-name-ssl-internal",
SecretName: "some-name-vault",
Secret: &v1.SecretVolumeSource{
SecurityContext: nil,
+ SecurityContext: nil,
- SecurityContext: s"&PodSecurityContext{SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,SupplementalGroups:[],FSGroup:nil,RunAsGroup:nil,Sysctls:[]Sysctl{},WindowsOptions:nil,FSGroupChangePolicy:nil,SeccompProfile:nil,AppArmorProfile:nil,SupplementalGroupsPolicy:nil,SELinux"...,
Selector: &{MatchLabels: {"app.kubernetes.io/component": "haproxy", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},
Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},
Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},
SelfLink: "",
ServiceAccountName: "default",
ServiceName: "some-name-haproxy",
ServiceName: "some-name-proxysql-unready",
ServiceName: "some-name-pxc",
SetHostnameAsFQDN: nil,
ShareProcessNamespace: nil,
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1
Spec: v1.PersistentVolumeClaimSpec{
Spec: v1.PodSpec{
Spec: v1.StatefulSetSpec{
StartupProbe: nil,
Status: {},
Status: v1.PersistentVolumeClaimStatus{
Status: v1.StatefulSetStatus{
StorageClassName: nil,
Subdomain: "",
Subdomain: "",
- Subresource: "status",
SuccessThreshold: 1,
Template: v1.PodTemplateSpec{
TerminationGracePeriodSeconds: &30,
TerminationGracePeriodSeconds: &600,
TerminationGracePeriodSeconds: nil,
+ TerminationMessagePath: "",
- TerminationMessagePath: "/dev/termination-log",
+ TerminationMessagePolicy: "",
- TerminationMessagePolicy: "File",
TimeoutSeconds: 5,
- Time: s"2025-11-19 18:46:07 +0000 UTC",
- Time: s"2025-11-19 18:46:44 +0000 UTC",
- Time: s"2025-11-19 18:49:41 +0000 UTC",
- Time: s"2025-11-19 18:51:38 +0000 UTC",
- Time: s"2025-11-19 18:51:55 +0000 UTC",
- Time: s"2025-11-19 18:52:05 +0000 UTC",
- Time: s"2025-11-19 18:52:48 +0000 UTC",
- Time: s"2025-11-19 18:52:57 +0000 UTC",
- Time: s"2025-11-19 18:52:58 +0000 UTC",
- Time: s"2025-11-19 18:52:58 +0000 UTC",
- Time: s"2025-11-19 18:55:34 +0000 UTC",
- Time: s"2025-11-19 18:55:41 +0000 UTC",
- Time: s"2025-11-19 18:56:40 +0000 UTC",
- Time: s"2025-11-19 18:57:06 +0000 UTC",
- Time: s"2025-11-19 18:57:29 +0000 UTC",
- Time: s"2025-11-19 18:58:12 +0000 UTC",
- Time: s"2025-11-19 18:58:26 +0000 UTC",
- Time: s"2025-11-19 19:00:42 +0000 UTC",
- Time: s"2025-11-19 19:00:53 +0000 UTC",
- Time: s"2025-11-19 19:01:15 +0000 UTC",
- Time: s"2025-11-19 19:02:47 +0000 UTC",
- Time: s"2025-11-19 19:05:17 +0000 UTC",
- Time: s"2025-11-19 19:05:22 +0000 UTC",
- Time: s"2025-11-19 19:06:56 +0000 UTC",
Tolerations: {{Key: "node.alpha.kubernetes.io/unreachable", Operator: "Exists", Effect: "NoExecute", TolerationSeconds: &6000}},
Tolerations: nil,
- TopologySpreadConstraints: nil,
+ TopologySpreadConstraints: []v1.TopologySpreadConstraint{},
TypeMeta: {},
TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"},
+ UID: "",
- UID: "7863ddf1-89d2-491d-9209-dcb3523c76f3",
- UID: "78b2ddea-bf44-484e-8ae1-046dc7cdfe59",
- UID: "855ca47c-91d9-433c-8558-534b0a279c0f",
+ UpdatedReplicas: 0,
- UpdatedReplicas: 2,
- UpdatedReplicas: 3,
+ UpdateRevision: "",
- UpdateRevision: "some-name-haproxy-6f4b8688b4",
- UpdateRevision: "some-name-proxysql-5d64bd669b",
- UpdateRevision: "some-name-proxysql-5d77458485",
- UpdateRevision: "some-name-proxysql-69fd7477b7",
- UpdateRevision: "some-name-proxysql-797987f65d",
- UpdateRevision: "some-name-proxysql-95fcf5777",
- UpdateRevision: "some-name-proxysql-f95ffff75",
- UpdateRevision: "some-name-pxc-7cf8467469",
- UpdateRevision: "some-name-pxc-7d56b668c6",
- UpdateRevision: "some-name-pxc-84dbf4f64b",
- UpdateRevision: "some-name-pxc-86d4d79487",
UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}},
&v1.StatefulSet{
Value: "",
+ Value: "caching_sha2_password",
ValueFrom: nil,
ValueFrom: &v1.EnvVarSource{
- Value: "mysql_native_password",
VolumeAttributesClassName: nil,
VolumeClaimTemplates: nil,
VolumeClaimTemplates: []v1.PersistentVolumeClaim{
VolumeDevices: nil,
- VolumeMode: &"Filesystem",
+ VolumeMode: nil,
VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...},
- VolumeMounts: []v1.VolumeMount{{Name: "datadir", MountPath: "/var/lib/mysql"}},
VolumeName: "",
VolumeSource: v1.VolumeSource{
Volumes: []v1.Volume{
VsphereVolume: nil,
WorkingDir: "",
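Note: the diff above is go-cmp output from the operator comparing its desired StatefulSet against the last-applied one; the percona.com/last-config-hash annotation carries that last-applied spec as base64-encoded JSON, which is why the changed values show up as opaque blobs. A minimal sketch for decoding one by hand, assuming the annotation sits on the StatefulSet's metadata and jq is available:

  # Pull the last-config-hash annotation off the live object and decode it.
  # The decoded value is the JSON spec ({"replicas":...,"selector":...,...})
  # the operator applied on the previous reconcile, so diffing two decoded
  # dumps shows the real change behind these blobs.
  kubectl -n users-1597 get sts some-name-pxc \
    -o jsonpath='{.metadata.annotations.percona\.com/last-config-hash}' \
    | base64 -d | jq .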
+ kubectl get pxc --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch pxc -n users-1597 some-name --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.fdWK2uIvgz
++ mktemp
+ local LAST_ERR=/tmp/tmp.D43LdlNzFE
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.fdWK2uIvgz
perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-1597 namespace
+ cat /tmp/tmp.D43LdlNzFE
+ rm /tmp/tmp.fdWK2uIvgz /tmp/tmp.D43LdlNzFE
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.gCNItss1el
++ mktemp
+ local LAST_ERR=/tmp/tmp.cgKkMOAJIB
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.gCNItss1el
No resources found
+ cat /tmp/tmp.cgKkMOAJIB
+ rm /tmp/tmp.gCNItss1el /tmp/tmp.cgKkMOAJIB
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.8vSil09rEa
++ mktemp
+ local LAST_ERR=/tmp/tmp.ICyDH6LJOu
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.8vSil09rEa
No resources found
+ cat /tmp/tmp.ICyDH6LJOu
+ rm /tmp/tmp.8vSil09rEa /tmp/tmp.ICyDH6LJOu
+ return 0
+ kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
++ mktemp
+ local LAST_OUT=/tmp/tmp.avgXkfglhb
++ mktemp
+ local LAST_ERR=/tmp/tmp.QHpzgeZg2Y
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.avgXkfglhb
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted
+ cat /tmp/tmp.QHpzgeZg2Y
+ rm /tmp/tmp.avgXkfglhb /tmp/tmp.QHpzgeZg2Y
+ return 0
+ kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml
+ :
+ '[' '!' -z '' ']'
+ '[' -n pxc-operator ']'
+ kubectl_bin delete --grace-period=0 --force=true namespace users-1597
+ rm -rf /tmp/tmp.ARylqDbRh4
+ kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator
++ mktemp
++ mktemp
+ desc 'test passed'
+ local LAST_OUT=/tmp/tmp.yVUijC1gXh
+ set +o xtrace
-----------------------------------------------------------------------------------
test passed
-----------------------------------------------------------------------------------
+ local LAST_OUT=/tmp/tmp.PL1pNeIlM3
++ mktemp
++ mktemp
+ local LAST_ERR=/tmp/tmp.833sqNqEPP
+ local exit_status=0
+ local LAST_ERR=/tmp/tmp.2TctqDLGL6
+ local exit_status=0
++ seq 0 2
++ seq 0 2
+ for i in '$(seq 0 2)'
+ for i in '$(seq 0 2)'
+ set +e
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace users-1597
+ kubectl delete --grace-period=0 --force=true namespace pxc-operator
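Note: the teardown clears metadata.finalizers on every pxc resource before deleting it; a custom resource whose finalizer the (possibly already-removed) operator can no longer process would otherwise leave kubectl delete and the namespace removal hanging. The pattern, lifted directly from the trace above:

  # Strip finalizers from every pxc resource in every namespace, then delete.
  # xargs feeds each output line as arguments: $0 = namespace, $1 = name.
  kubectl get pxc --all-namespaces -o wide | grep -v NAMESPACE \
    | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
  kubectl delete pxc --all --all-namespaces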
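Note: the repeated mktemp / seq 0 2 / set +e blocks throughout are the harness's kubectl_bin retry wrapper. Its definition is not printed in this log, so the following is only an inferred reconstruction of what the trace implies: up to three attempts, stdout and stderr captured to temp files and replayed afterwards.

  kubectl_bin() {
      local LAST_OUT=$(mktemp)
      local LAST_ERR=$(mktemp)
      local exit_status=0
      for i in $(seq 0 2); do             # up to three attempts, as in the trace
          set +e
          kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
          exit_status=$?
          set -e
          [ $exit_status != 0 ] || break  # matches: '[' 0 '!=' 0 ']' + break
      done
      cat "$LAST_OUT"                     # replay captured stdout
      cat "$LAST_ERR"                     # replay captured stderr
      rm "$LAST_OUT" "$LAST_ERR"
      return $exit_status
  }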