Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/logs/users-8-0.log grep: warning: stray \ before - Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 + create_infra users-12601 + local ns=users-12601 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-2636 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.LpiXmOoSBy ++ mktemp + local LAST_ERR=/tmp/tmp.JXsjuRqyU7 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LpiXmOoSBy perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-2636 namespace + cat /tmp/tmp.JXsjuRqyU7 + rm /tmp/tmp.LpiXmOoSBy /tmp/tmp.JXsjuRqyU7 + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.PvQBnC7VxB ++ mktemp + local LAST_ERR=/tmp/tmp.W4TsDdfJhQ + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PvQBnC7VxB No resources found + cat /tmp/tmp.W4TsDdfJhQ + rm /tmp/tmp.PvQBnC7VxB /tmp/tmp.W4TsDdfJhQ + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.ZPBRgDPPZp ++ mktemp + local LAST_ERR=/tmp/tmp.AHJrTesYuU + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ZPBRgDPPZp No resources found + cat /tmp/tmp.AHJrTesYuU + rm /tmp/tmp.ZPBRgDPPZp /tmp/tmp.AHJrTesYuU + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: 
resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' ++ mktemp + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp egrep: warning: egrep is obsolescent; using grep -E + local LAST_OUT=/tmp/tmp.5UDooSxcoZ ++ mktemp + local LAST_OUT=/tmp/tmp.K7fYb2zUmL ++ mktemp + local LAST_ERR=/tmp/tmp.f5XLodgNpQ + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.LeFhqcCave + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get ns + for i in $(seq 0 2) + set +e + kubectl delete namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5UDooSxcoZ + cat /tmp/tmp.f5XLodgNpQ + rm /tmp/tmp.5UDooSxcoZ /tmp/tmp.f5XLodgNpQ + return 0 namespace "users-2636" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.K7fYb2zUmL namespace "pxc-operator" deleted + cat /tmp/tmp.LeFhqcCave + rm /tmp/tmp.K7fYb2zUmL /tmp/tmp.LeFhqcCave + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.sUtxb0d5cX ++ mktemp + local LAST_ERR=/tmp/tmp.5uq4m8lKEu + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.sUtxb0d5cX namespace/pxc-operator created + cat /tmp/tmp.5uq4m8lKEu + rm /tmp/tmp.sUtxb0d5cX /tmp/tmp.5uq4m8lKEu + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.m2oUKZyZfA +++ mktemp ++ local LAST_ERR=/tmp/tmp.8XoBsd9uiv ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.m2oUKZyZfA ++ cat /tmp/tmp.8XoBsd9uiv ++ rm /tmp/tmp.m2oUKZyZfA /tmp/tmp.8XoBsd9uiv ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster9 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.LxjHJIngxJ ++ mktemp + local LAST_ERR=/tmp/tmp.2CeRWgEyFB + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster9 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LxjHJIngxJ Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster9" modified. 
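A note on the pattern that dominates this trace: kubectl_bin is a wrapper that buffers a kubectl call's stdout and stderr in mktemp files and retries the call up to three times before giving up, which is why every step above expands into the same mktemp / seq 0 2 / cat / rm sequence. A minimal sketch of that wrapper, reconstructed from the trace (the real helper is defined elsewhere in e2e-tests and may differ in details such as the delay between attempts):

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"   # buffer this attempt's output
        exit_status=$?
        set -e
        [ "$exit_status" = 0 ] && break           # stop retrying on success
        sleep 0                                   # the trace shows a zero-second pause between attempts
    done
    cat "$LAST_OUT"                               # replay captured stdout
    cat "$LAST_ERR"                               # replay captured stderr
    rm -f "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}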
+ cat /tmp/tmp.2CeRWgEyFB + rm /tmp/tmp.LxjHJIngxJ /tmp/tmp.2CeRWgEyFB + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.CHpYYK9Okn ++ mktemp + local LAST_ERR=/tmp/tmp.uLhB0l7AZl + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.CHpYYK9Okn customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.uLhB0l7AZl + rm /tmp/tmp.CHpYYK9Okn /tmp/tmp.uLhB0l7AZl + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.Fyh8guaq19 ++ mktemp + local LAST_ERR=/tmp/tmp.dc1GtJCUwJ + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Fyh8guaq19 clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.dc1GtJCUwJ + rm /tmp/tmp.Fyh8guaq19 /tmp/tmp.dc1GtJCUwJ + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/deploy/cw-operator.yaml + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2182-afafff88^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.8Gms5fTzTW ++ mktemp + local LAST_ERR=/tmp/tmp.Mk8LA1YSj6 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8Gms5fTzTW deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.Mk8LA1YSj6 + rm /tmp/tmp.8Gms5fTzTW /tmp/tmp.Mk8LA1YSj6 + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.flPvLnIHiG ++ mktemp + local LAST_ERR=/tmp/tmp.ki7AOtZlUj + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.flPvLnIHiG pod/percona-xtradb-cluster-operator-97b698788-pp7qg condition met + cat /tmp/tmp.ki7AOtZlUj + rm /tmp/tmp.flPvLnIHiG /tmp/tmp.ki7AOtZlUj + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.3YWfGyUDvG +++ mktemp ++ local LAST_ERR=/tmp/tmp.pC5uKLm4J3 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3YWfGyUDvG ++ cat /tmp/tmp.pC5uKLm4J3 ++ rm /tmp/tmp.3YWfGyUDvG /tmp/tmp.pC5uKLm4J3 ++ return 0 + wait_pod percona-xtradb-cluster-operator-97b698788-pp7qg 480 pxc-operator + local pod=percona-xtradb-cluster-operator-97b698788-pp7qg + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-97b698788-pp7qg ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-97b698788-pp7qg condition met waiting for pod/percona-xtradb-cluster-operator-97b698788-pp7qg to become Ready.Ok + sleep 3 + create_namespace users-12601 + local namespace=users-12601 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces 
----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces users-12601' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-12601 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-12601 ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.A7g1CCTNe0 egrep: warning: egrep is obsolescent; using grep -E ++ mktemp + local LAST_OUT=/tmp/tmp.EDZoewIKrh + local LAST_ERR=/tmp/tmp.Y12x4HWcKM + local exit_status=0 ++ mktemp ++ seq 0 2 + local LAST_ERR=/tmp/tmp.0BqesCfZBp + local exit_status=0 + for i in $(seq 0 2) + set +e + kubectl get ns ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete namespace users-12601 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete namespace users-12601 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.A7g1CCTNe0 + cat /tmp/tmp.Y12x4HWcKM + rm /tmp/tmp.A7g1CCTNe0 /tmp/tmp.Y12x4HWcKM + return 0 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete namespace users-12601 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.EDZoewIKrh + cat /tmp/tmp.0BqesCfZBp Error from server (NotFound): namespaces "users-12601" not found + rm /tmp/tmp.EDZoewIKrh /tmp/tmp.0BqesCfZBp + return 1 + : + wait_for_delete namespace/users-12601 + local res=namespace/users-12601 + echo -n 'waiting for namespace/users-12601 to be deleted' waiting for namespace/users-12601 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "users-12601" not found + desc 'create namespace users-12601' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-12601 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-12601 ++ mktemp + local LAST_OUT=/tmp/tmp.L9sycH3rOA ++ mktemp + local LAST_ERR=/tmp/tmp.bwh0r2hwJ4 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace users-12601 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.L9sycH3rOA namespace/users-12601 created + cat /tmp/tmp.bwh0r2hwJ4 + rm /tmp/tmp.L9sycH3rOA /tmp/tmp.bwh0r2hwJ4 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.gyIbIRqN4e +++ mktemp ++ local LAST_ERR=/tmp/tmp.vhh5L5oLLV ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gyIbIRqN4e ++ cat /tmp/tmp.vhh5L5oLLV ++ rm /tmp/tmp.gyIbIRqN4e /tmp/tmp.vhh5L5oLLV ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster9 --namespace=users-12601 ++ mktemp + local LAST_OUT=/tmp/tmp.YM7o8efnO0 ++ mktemp + local LAST_ERR=/tmp/tmp.daohjL6No7 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster9 --namespace=users-12601 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat 
/tmp/tmp.YM7o8efnO0 Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2182-afafff88-2-cluster9" modified. + cat /tmp/tmp.daohjL6No7 + rm /tmp/tmp.YM7o8efnO0 /tmp/tmp.daohjL6No7 + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.GcP49tQi7z ++ mktemp + local LAST_ERR=/tmp/tmp.tELNAqoJAd + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.GcP49tQi7z secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.tELNAqoJAd + rm /tmp/tmp.GcP49tQi7z /tmp/tmp.tELNAqoJAd + return 0 + desc 'create PXC cluster with 1-password secret' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster with 1-password secret ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/some-name.yml '' '' /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/secrets_one_pass.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/secrets_one_pass.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/secrets_one_pass.yml ++ mktemp + local LAST_OUT=/tmp/tmp.deHVUGwuvW ++ mktemp + local LAST_ERR=/tmp/tmp.r0XkL9LH3x + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/secrets_one_pass.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.deHVUGwuvW secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.r0XkL9LH3x + rm /tmp/tmp.deHVUGwuvW /tmp/tmp.r0XkL9LH3x + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/client.yml + /usr/sbin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/sbin/sed -e 
's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/sbin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/sbin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2182-afafff88#' + /usr/sbin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/sbin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + local LAST_OUT=/tmp/tmp.iRmpAebZIM ++ mktemp + /usr/sbin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/sbin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/sbin/sed -e s~minio-service.#namespace~minio-service.users-12601~ + local LAST_ERR=/tmp/tmp.m9kPqUhPtB + local exit_status=0 + /usr/sbin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/sbin/sed -e 's#apply:.*#apply: Never#' ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.iRmpAebZIM deployment.apps/pxc-client created + cat /tmp/tmp.m9kPqUhPtB + rm /tmp/tmp.iRmpAebZIM /tmp/tmp.m9kPqUhPtB + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/conf/some-name.yml + /usr/sbin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/sbin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/sbin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' ++ mktemp + /usr/sbin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2182-afafff88#' + /usr/sbin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/sbin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/sbin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + local LAST_OUT=/tmp/tmp.WnGp18LK2Z + /usr/sbin/sed -e s~minio-service.#namespace~minio-service.users-12601~ ++ mktemp + /usr/sbin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/sbin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/sbin/sed -e 's#apply:.*#apply: Never#' + local LAST_ERR=/tmp/tmp.qAZ0lM0kBH + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.WnGp18LK2Z perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.qAZ0lM0kBH + rm /tmp/tmp.WnGp18LK2Z /tmp/tmp.qAZ0lM0kBH + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 
'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.Z4Mx1K8qXV ++++ mktemp +++ local LAST_ERR=/tmp/tmp.BBfWhH26nv +++ local exit_status=0 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.Z4Mx1K8qXV +++ cat /tmp/tmp.BBfWhH26nv +++ rm /tmp/tmp.Z4Mx1K8qXV /tmp/tmp.BBfWhH26nv +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.FYdaOd2yO8 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.0QwYpkDZwO +++ local exit_status=0 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.FYdaOd2yO8 +++ cat /tmp/tmp.0QwYpkDZwO +++ rm /tmp/tmp.FYdaOd2yO8 /tmp/tmp.0QwYpkDZwO +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-12601 ++ mktemp + local LAST_OUT=/tmp/tmp.JFgknGBIGh ++ mktemp + local LAST_ERR=/tmp/tmp.gGXzDMpRyD + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-12601 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-12601 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-12601 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.JFgknGBIGh + cat /tmp/tmp.gGXzDMpRyD error: no matching resources found + rm /tmp/tmp.JFgknGBIGh /tmp/tmp.gGXzDMpRyD + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in $(seq 0 $last_pod) + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in $(seq 0 $last_pod) + wait_pod 
some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in $(seq 0 $last_pod) + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in $(seq 0 $last_pod) + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.XSV1NZhxvl +++ mktemp ++ local LAST_ERR=/tmp/tmp.CaSwQiVhig ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XSV1NZhxvl ++ cat /tmp/tmp.CaSwQiVhig ++ rm /tmp/tmp.XSV1NZhxvl /tmp/tmp.CaSwQiVhig ++ return 0 + local 'root_pass=hkC^#X=eh5)HjXETc' + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''hkC^#X=eh5)HjXETc'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -p'\''hkC^#X=eh5)HjXETc'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8q9ys3oUi7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.b8d893yhHU ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8q9ys3oUi7 ++ cat /tmp/tmp.b8d893yhHU ++ rm /tmp/tmp.8q9ys3oUi7 /tmp/tmp.b8d893yhHU ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for 
pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''hkC^#X=eh5)HjXETc'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -p'\''hkC^#X=eh5)HjXETc'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.100I82Bwk2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.z7VrYcD3XL ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.100I82Bwk2 ++ cat /tmp/tmp.z7VrYcD3XL ++ rm /tmp/tmp.100I82Bwk2 /tmp/tmp.z7VrYcD3XL ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in $(seq 0 $((size - 1))) + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''hkC^#X=eh5)HjXETc'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''hkC^#X=eh5)HjXETc'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''hkC^#X=eh5)HjXETc'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''hkC^#X=eh5)HjXETc'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4Y1gsmXJU1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.jKngzfTxLP ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4Y1gsmXJU1 ++ cat /tmp/tmp.jKngzfTxLP ++ rm /tmp/tmp.4Y1gsmXJU1 /tmp/tmp.jKngzfTxLP ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok 
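Each of the data checks above follows the same recipe: read the user's password out of the Kubernetes secret, locate the pxc-client pod, and run the SQL statement through it. A condensed sketch of that flow, assuming the names from this run (the suite's getSecretData and run_mysql helpers wrap the same calls with the retry logic shown above):

# getSecretData equivalent: pull one key from a secret and base64-decode it
get_secret_data() {
    local secret=$1 key=$2
    kubectl get "secrets/$secret" --template="{{.data.$key}}" | base64 --decode
}

root_pass=$(get_secret_data my-cluster-secrets root)

# run_mysql equivalent: execute a statement via the client deployment's pod
client_pod=$(kubectl get pods --selector=name=pxc-client \
    -o 'jsonpath={.items[].metadata.name}')
kubectl exec "$client_pod" -c pxc-client -- \
    mysql -sN -h some-name-proxysql -P3306 -uroot -p"$root_pass" \
    -e 'SELECT * FROM myApp.myApp;'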
+ set +o xtrace + '[' '!' -s /tmp/tmp.Y8WkxBbrUi/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-1.sql /tmp/tmp.Y8WkxBbrUi/select-1.sql + for i in $(seq 0 $((size - 1))) + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''hkC^#X=eh5)HjXETc'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''hkC^#X=eh5)HjXETc'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''hkC^#X=eh5)HjXETc'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''hkC^#X=eh5)HjXETc'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.07Hu3Vs5lz +++ mktemp ++ local LAST_ERR=/tmp/tmp.GW1a8qQBM3 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.07Hu3Vs5lz ++ cat /tmp/tmp.GW1a8qQBM3 ++ rm /tmp/tmp.07Hu3Vs5lz /tmp/tmp.GW1a8qQBM3 ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.Y8WkxBbrUi/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-1.sql /tmp/tmp.Y8WkxBbrUi/select-1.sql + for i in $(seq 0 $((size - 1))) + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''hkC^#X=eh5)HjXETc'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''hkC^#X=eh5)HjXETc'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''hkC^#X=eh5)HjXETc'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''hkC^#X=eh5)HjXETc'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TfqsweO3gq +++ mktemp ++ local LAST_ERR=/tmp/tmp.RopbowI1bg ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TfqsweO3gq ++ cat /tmp/tmp.RopbowI1bg ++ rm /tmp/tmp.TfqsweO3gq /tmp/tmp.RopbowI1bg ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.Y8WkxBbrUi/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-1.sql /tmp/tmp.Y8WkxBbrUi/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.Gw4GXR0muP +++ mktemp ++ local LAST_ERR=/tmp/tmp.cUHE8pAHsV ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Gw4GXR0muP ++ cat /tmp/tmp.cUHE8pAHsV Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.Gw4GXR0muP /tmp/tmp.cUHE8pAHsV ++ return 0 + '[' '' ']' + desc 'test missing passwords were created and present in internal secrets' + set +o xtrace ----------------------------------------------------------------------------------- test missing passwords were created and present in internal secrets ----------------------------------------------------------------------------------- + empty_pwds=() + wrong_pwds=() + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking root' Checking root ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.AvDaDdZ4wS +++ mktemp ++ local LAST_ERR=/tmp/tmp.OGuu0dzyx8 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AvDaDdZ4wS ++ cat /tmp/tmp.OGuu0dzyx8 ++ rm /tmp/tmp.AvDaDdZ4wS /tmp/tmp.OGuu0dzyx8 ++ return 0 + secret_pass='hkC^#X=eh5)HjXETc' ++ getSecretData internal-some-name root ++ local secretName=internal-some-name ++ local dataKey=root ++ kubectl_bin get secrets/internal-some-name '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.cdiAJtxo1S +++ mktemp ++ local LAST_ERR=/tmp/tmp.Unz81EVEAM ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cdiAJtxo1S ++ cat /tmp/tmp.Unz81EVEAM ++ rm /tmp/tmp.cdiAJtxo1S /tmp/tmp.Unz81EVEAM ++ return 0 + int_secret_pass='hkC^#X=eh5)HjXETc' + [[ -z hkC^#X=eh5)HjXETc ]] + [[ hkC^#X=eh5)HjXETc != \h\k\C\^\#\X\=\e\h\5\)\H\j\X\E\T\c ]] + [[ root != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ root ]] + [[ '' =~ root ]] + echo 'Running compare for root' Running compare for root + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''hkC^#X=eh5)HjXETc'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''hkC^#X=eh5)HjXETc'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql ]] + 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''hkC^#X=eh5)HjXETc'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''hkC^#X=eh5)HjXETc'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bVfzOcuv77 +++ mktemp ++ local LAST_ERR=/tmp/tmp.KCJUjmvEXF ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bVfzOcuv77 ++ cat /tmp/tmp.KCJUjmvEXF ++ rm /tmp/tmp.bVfzOcuv77 /tmp/tmp.KCJUjmvEXF ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.Y8WkxBbrUi/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.Y8WkxBbrUi/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking xtrabackup' Checking xtrabackup ++ getSecretData my-cluster-secrets xtrabackup ++ local secretName=my-cluster-secrets ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.Io9Q2YMv8s +++ mktemp ++ local LAST_ERR=/tmp/tmp.uPoHIAiIc0 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Io9Q2YMv8s ++ cat /tmp/tmp.uPoHIAiIc0 ++ rm /tmp/tmp.Io9Q2YMv8s /tmp/tmp.uPoHIAiIc0 ++ return 0 + secret_pass='$2aBU=dCuIAO>%2Y~Tk' ++ getSecretData internal-some-name xtrabackup ++ local secretName=internal-some-name ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.JeStJxwzTd +++ mktemp ++ local LAST_ERR=/tmp/tmp.fHGRzOVY1G ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JeStJxwzTd ++ cat /tmp/tmp.fHGRzOVY1G ++ rm /tmp/tmp.JeStJxwzTd /tmp/tmp.fHGRzOVY1G ++ return 0 + int_secret_pass='$2aBU=dCuIAO>%2Y~Tk' + [[ -z $2aBU=dCuIAO>%2Y~Tk ]] + [[ $2aBU=dCuIAO>%2Y~Tk != \$\2\a\B\U\=\d\C\u\I\A\O\>\%\2\Y\~\T\k ]] + [[ xtrabackup != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ xtrabackup ]] + [[ '' =~ xtrabackup ]] + echo 'Running compare for xtrabackup' Running compare for xtrabackup + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''$2aBU=dCuIAO>%2Y~Tk'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''$2aBU=dCuIAO>%2Y~Tk'\''' + local postfix= + local 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''$2aBU=dCuIAO>%2Y~Tk'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''$2aBU=dCuIAO>%2Y~Tk'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Q3GzpglGA6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.X1aaJprPkw ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Q3GzpglGA6 ++ cat /tmp/tmp.X1aaJprPkw ++ rm /tmp/tmp.Q3GzpglGA6 /tmp/tmp.X1aaJprPkw ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.Y8WkxBbrUi/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.Y8WkxBbrUi/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking monitor' Checking monitor ++ getSecretData my-cluster-secrets monitor ++ local secretName=my-cluster-secrets ++ local dataKey=monitor ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.qDxxVIRG2c +++ mktemp ++ local LAST_ERR=/tmp/tmp.thIabl9pyx ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qDxxVIRG2c ++ cat /tmp/tmp.thIabl9pyx ++ rm /tmp/tmp.qDxxVIRG2c /tmp/tmp.thIabl9pyx ++ return 0 + secret_pass=monitor_password ++ getSecretData internal-some-name monitor ++ local secretName=internal-some-name ++ local dataKey=monitor ++ kubectl_bin get secrets/internal-some-name '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.om2zC0IjIe +++ mktemp ++ local LAST_ERR=/tmp/tmp.BQPmXE8gnm ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.om2zC0IjIe ++ cat /tmp/tmp.BQPmXE8gnm ++ rm /tmp/tmp.om2zC0IjIe /tmp/tmp.BQPmXE8gnm ++ return 0 + int_secret_pass=monitor_password + [[ -z monitor_password ]] + [[ monitor_password != \m\o\n\i\t\o\r\_\p\a\s\s\w\o\r\d ]] + [[ monitor != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ monitor ]] + [[ '' =~ monitor ]] + echo 'Running compare for monitor' Running compare for monitor + 
compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8bbXEyweYI +++ mktemp ++ local LAST_ERR=/tmp/tmp.JYTRSPTYi0 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8bbXEyweYI ++ cat /tmp/tmp.JYTRSPTYi0 ++ rm /tmp/tmp.8bbXEyweYI /tmp/tmp.JYTRSPTYi0 ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.Y8WkxBbrUi/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.Y8WkxBbrUi/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking proxyadmin' Checking proxyadmin ++ getSecretData my-cluster-secrets proxyadmin ++ local secretName=my-cluster-secrets ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.uRVZ2ByOuu +++ mktemp ++ local LAST_ERR=/tmp/tmp.65YTBAx0xx ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uRVZ2ByOuu ++ cat /tmp/tmp.65YTBAx0xx ++ rm /tmp/tmp.uRVZ2ByOuu /tmp/tmp.65YTBAx0xx ++ return 0 + secret_pass='0pfGM}T1jOVZ_MlU0dF' ++ getSecretData internal-some-name proxyadmin ++ local secretName=internal-some-name ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.3qbFy8y2Oj +++ mktemp ++ local LAST_ERR=/tmp/tmp.jbCFIZKc2v ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3qbFy8y2Oj ++ cat /tmp/tmp.jbCFIZKc2v ++ rm /tmp/tmp.3qbFy8y2Oj /tmp/tmp.jbCFIZKc2v ++ return 0 + int_secret_pass='0pfGM}T1jOVZ_MlU0dF' + [[ -z 0pfGM}T1jOVZ_MlU0dF ]] + [[ 0pfGM}T1jOVZ_MlU0dF != \0\p\f\G\M\}\T\1\j\O\V\Z\_\M\l\U\0\d\F ]] + [[ proxyadmin != \p\r\o\x\y\a\d\m\i\n ]] + [[ proxyadmin == \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ proxyadmin ]] + [[ '' =~ proxyadmin ]] + echo 'Running compare for proxyadmin' Running compare for proxyadmin + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''0pfGM}T1jOVZ_MlU0dF'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''0pfGM}T1jOVZ_MlU0dF'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''0pfGM}T1jOVZ_MlU0dF'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''0pfGM}T1jOVZ_MlU0dF'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace egrep: warning: egrep is obsolescent; using grep -E + '[' '!' 
-s /tmp/tmp.Y8WkxBbrUi/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-2.sql /tmp/tmp.Y8WkxBbrUi/select-2.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking operator' Checking operator ++ getSecretData my-cluster-secrets operator ++ local secretName=my-cluster-secrets ++ local dataKey=operator ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.vDV3MdYWEL +++ mktemp ++ local LAST_ERR=/tmp/tmp.9BbuQgqXs2 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vDV3MdYWEL ++ cat /tmp/tmp.9BbuQgqXs2 ++ rm /tmp/tmp.vDV3MdYWEL /tmp/tmp.9BbuQgqXs2 ++ return 0 + secret_pass='@^]w9$dP_5Re-N.aJf)' ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.xP1sG98Dmg +++ mktemp ++ local LAST_ERR=/tmp/tmp.c5NKMS0Jdg ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xP1sG98Dmg ++ cat /tmp/tmp.c5NKMS0Jdg ++ rm /tmp/tmp.xP1sG98Dmg /tmp/tmp.c5NKMS0Jdg ++ return 0 + int_secret_pass='@^]w9$dP_5Re-N.aJf)' + [[ -z @^]w9$dP_5Re-N.aJf) ]] + [[ @^]w9$dP_5Re-N.aJf) != \@\^\]\w\9\$\d\P\_\5\R\e\-\N\.\a\J\f\) ]] + [[ operator != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ operator ]] + [[ '' =~ operator ]] + echo 'Running compare for operator' Running compare for operator + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''@^]w9$dP_5Re-N.aJf)'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''@^]w9$dP_5Re-N.aJf)'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''@^]w9$dP_5Re-N.aJf)'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''@^]w9$dP_5Re-N.aJf)'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EGOo7FaVwI +++ mktemp ++ local LAST_ERR=/tmp/tmp.fMHLgX5es2 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EGOo7FaVwI ++ cat /tmp/tmp.fMHLgX5es2 ++ rm /tmp/tmp.EGOo7FaVwI /tmp/tmp.fMHLgX5es2 ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep 
'^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.Y8WkxBbrUi/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.Y8WkxBbrUi/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking replication' Checking replication ++ getSecretData my-cluster-secrets replication ++ local secretName=my-cluster-secrets ++ local dataKey=replication ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.wTXhCWrn5O +++ mktemp ++ local LAST_ERR=/tmp/tmp.uNpYC7HNZ5 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wTXhCWrn5O ++ cat /tmp/tmp.uNpYC7HNZ5 ++ rm /tmp/tmp.wTXhCWrn5O /tmp/tmp.uNpYC7HNZ5 ++ return 0 + secret_pass='OK,bwcjLT(jltGhs' ++ getSecretData internal-some-name replication ++ local secretName=internal-some-name ++ local dataKey=replication ++ kubectl_bin get secrets/internal-some-name '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.piWn42H2Wh +++ mktemp ++ local LAST_ERR=/tmp/tmp.jaNHkADJSI ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.piWn42H2Wh ++ cat /tmp/tmp.jaNHkADJSI ++ rm /tmp/tmp.piWn42H2Wh /tmp/tmp.jaNHkADJSI ++ return 0 + int_secret_pass='OK,bwcjLT(jltGhs' + [[ -z OK,bwcjLT(jltGhs ]] + [[ OK,bwcjLT(jltGhs != \O\K\,\b\w\c\j\L\T\(\j\l\t\G\h\s ]] + [[ replication != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ replication ]] + [[ '' =~ replication ]] + echo 'Running compare for replication' Running compare for replication + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''OK,bwcjLT(jltGhs'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''OK,bwcjLT(jltGhs'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''OK,bwcjLT(jltGhs'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''OK,bwcjLT(jltGhs'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jmfuFC8jRM +++ mktemp ++ local LAST_ERR=/tmp/tmp.DggFFsUn1x ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jmfuFC8jRM ++ cat 
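Annotation: before each query, the test resolves the pxc-client pod and waits for it to be Ready; the "condition met" line above is consistent with kubectl wait, and the sed/grep pair derives a container name only for pods named *-pxc-N or *-proxysql-N. A sketch under that assumption (the wait command itself is inferred, not shown verbatim in the trace):

    # Sketch: wait for a pod to become Ready, deriving an optional container name from the pod name.
    wait_pod() {
        local pod=$1
        local max_retry=${2:-480}
        local container
        # Empty for the client pod, so later kubectl exec falls back to the default
        # container -- hence the "Defaulted container ..." notice in the log.
        container=$(echo "${pod}" | sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' | grep -E '^(pxc|proxysql)$' || true)
        kubectl wait --for=condition=Ready "pod/${pod}" --timeout="${max_retry}s"
    }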
/tmp/tmp.DggFFsUn1x ++ rm /tmp/tmp.jmfuFC8jRM /tmp/tmp.DggFFsUn1x ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.Y8WkxBbrUi/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.Y8WkxBbrUi/select-4.sql + [[ -n '' ]] + [[ -n '' ]] + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.tzqg8vAjIX ++ mktemp + local LAST_ERR=/tmp/tmp.XCEVvgy9hL + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.tzqg8vAjIX secret/my-cluster-secrets patched + cat /tmp/tmp.XCEVvgy9hL + rm /tmp/tmp.tzqg8vAjIX /tmp/tmp.XCEVvgy9hL + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TLNPPRQm8e +++ mktemp ++ local LAST_ERR=/tmp/tmp.6eb1lQEhzQ ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TLNPPRQm8e ++ cat /tmp/tmp.6eb1lQEhzQ ++ rm /tmp/tmp.TLNPPRQm8e /tmp/tmp.6eb1lQEhzQ ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met 
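Annotation: the "test root" step above replaces the root entry of my-cluster-secrets with the base64 string dGVzdC1wYXNzd29yZA==, which decodes to test-password, then re-runs the select-4 compare through ProxySQL with the new credential. A minimal sketch of the traced patch_secret helper (a merge of the Secret's data map):

    # Sketch: overwrite one key of a Secret with a base64-encoded value.
    patch_secret() {
        local secret=$1 key=$2 value=$3
        kubectl patch secret "${secret}" -p="{\"data\":{\"${key}\": \"${value}\"}}"
    }

    # As in the trace: set root to "test-password" (base64: dGVzdC1wYXNzd29yZA==) and give the
    # operator time to propagate it before verifying access through some-name-proxysql.
    patch_secret my-cluster-secrets root "$(echo -n test-password | base64)"
    sleep 15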
waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.Y8WkxBbrUi/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.Y8WkxBbrUi/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.DzZHkg1cOg ++ mktemp + local LAST_ERR=/tmp/tmp.WSJDIAMloi + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.DzZHkg1cOg perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.WSJDIAMloi + rm /tmp/tmp.DzZHkg1cOg /tmp/tmp.WSJDIAMloi + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RpfsmtIZ7O +++ mktemp ++ local LAST_ERR=/tmp/tmp.r9eQ27rTFD ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RpfsmtIZ7O ++ cat /tmp/tmp.r9eQ27rTFD ++ rm /tmp/tmp.RpfsmtIZ7O /tmp/tmp.r9eQ27rTFD ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.r4lawkFqXF +++ mktemp ++ local LAST_ERR=/tmp/tmp.sTEerlxTKS ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.r4lawkFqXF ++ cat /tmp/tmp.sTEerlxTKS ++ rm /tmp/tmp.r4lawkFqXF /tmp/tmp.sTEerlxTKS ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.qNU0bgKR3v ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.FSY4SSXQSy +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.qNU0bgKR3v +++++ cat /tmp/tmp.FSY4SSXQSy +++++ rm /tmp/tmp.qNU0bgKR3v /tmp/tmp.FSY4SSXQSy +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.RnvyPH5dGj ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.UIFuO5c8kM +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 
'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.RnvyPH5dGj +++++ cat /tmp/tmp.UIFuO5c8kM +++++ rm /tmp/tmp.RnvyPH5dGj /tmp/tmp.UIFuO5c8kM +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vrBnEYTXc7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.2Y5MaOaVnf ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vrBnEYTXc7 ++ cat /tmp/tmp.2Y5MaOaVnf ++ rm /tmp/tmp.vrBnEYTXc7 /tmp/tmp.2Y5MaOaVnf ++ return 0 + [[ 3 == \3 ]] + echo + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.25adTEluYH ++ mktemp + local LAST_ERR=/tmp/tmp.lDDalKg3KC + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.25adTEluYH secret/my-cluster-secrets patched + cat /tmp/tmp.lDDalKg3KC + rm /tmp/tmp.25adTEluYH /tmp/tmp.lDDalKg3KC + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SrQ2gqfazk +++ mktemp ++ local LAST_ERR=/tmp/tmp.1I10WcN62z ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SrQ2gqfazk ++ cat /tmp/tmp.1I10WcN62z ++ rm /tmp/tmp.SrQ2gqfazk /tmp/tmp.1I10WcN62z ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8LUduatXLM +++ mktemp ++ local LAST_ERR=/tmp/tmp.zYKACIqt3h ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8LUduatXLM ++ cat /tmp/tmp.zYKACIqt3h ++ rm /tmp/tmp.8LUduatXLM /tmp/tmp.zYKACIqt3h ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
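Annotation: after every password rotation or size change, wait_cluster_consistency (traced above and below) polls the custom resource until .status.state is ready and the pxc/proxysql ready counters match the requested sizes, sleeping 5 seconds between probes for at most 300 iterations. A condensed sketch of that loop, assuming plain kubectl in place of the retry wrapper used by the test:

    # Sketch: poll a PerconaXtraDBCluster until it reports ready with the expected replica counts.
    wait_cluster_consistency() {
        local cluster_name=$1 cluster_size=$2 proxy_size=$3
        local i=0 max=300 state pxc_ready proxy_ready
        while true; do
            state=$(kubectl get pxc "${cluster_name}" -o 'jsonpath={.status.state}')
            pxc_ready=$(kubectl get pxc "${cluster_name}" -o 'jsonpath={.status.pxc.ready}')
            proxy_ready=$(kubectl get pxc "${cluster_name}" -o 'jsonpath={.status.proxysql.ready}')
            if [[ ${state} == "ready" && ${pxc_ready} == "${cluster_size}" && ${proxy_ready} == "${proxy_size}" ]]; then
                return 0
            fi
            if [ "${i}" -ge "${max}" ]; then
                echo "pxc/${cluster_name} did not become ready in time" >&2
                return 1
            fi
            sleep 5
            i=$((i + 1))
        done
    }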
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.f5Eae9efM9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.8oxuh0GWzl ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.f5Eae9efM9 ++ cat /tmp/tmp.8oxuh0GWzl ++ rm /tmp/tmp.f5Eae9efM9 /tmp/tmp.8oxuh0GWzl ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rtoSeBPygz +++ mktemp ++ local LAST_ERR=/tmp/tmp.2N9SJrS7GN ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rtoSeBPygz ++ cat /tmp/tmp.2N9SJrS7GN ++ rm /tmp/tmp.rtoSeBPygz /tmp/tmp.2N9SJrS7GN ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.BLDlTs1d84 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.b41rKGRlNc +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.BLDlTs1d84 +++++ cat /tmp/tmp.b41rKGRlNc +++++ rm /tmp/tmp.BLDlTs1d84 /tmp/tmp.b41rKGRlNc +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.z2OuQ7dVcu ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.7QoqeRsOcC +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.z2OuQ7dVcu +++++ cat /tmp/tmp.7QoqeRsOcC +++++ rm /tmp/tmp.z2OuQ7dVcu /tmp/tmp.7QoqeRsOcC +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gSXkZe5jpI +++ mktemp ++ local LAST_ERR=/tmp/tmp.1tZIzIIjR8 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gSXkZe5jpI ++ cat /tmp/tmp.1tZIzIIjR8 ++ rm /tmp/tmp.gSXkZe5jpI /tmp/tmp.1tZIzIIjR8 ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + 
run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace egrep: warning: egrep is obsolescent; using grep -E + '[' '!' -s /tmp/tmp.Y8WkxBbrUi/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-2.sql /tmp/tmp.Y8WkxBbrUi/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace egrep: warning: egrep is obsolescent; using grep -E + '[' '!' -s /tmp/tmp.Y8WkxBbrUi/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-2.sql /tmp/tmp.Y8WkxBbrUi/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace egrep: warning: egrep is obsolescent; using grep -E + '[' '!' 
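Annotation: compare_mysql_cmd_local, used above for each proxysql pod, runs the query inside the named pod/container rather than through the pxc-client deployment; for proxyadmin it targets the ProxySQL admin interface on 127.0.0.1:6032. A sketch of that execution path — the kubectl exec / mysql invocation is an assumption, only the arguments mirror the trace:

    # Sketch: run a SQL statement inside a specific pod/container and print the rows.
    run_mysql_local() {
        local command=$1 uri=$2 pod=$3 container_name=${4:-pxc}
        kubectl exec "${pod}" -c "${container_name}" -- \
            bash -c "printf '%s\n' \"${command}\" | mysql -sN ${uri}"
    }

    # As in the trace: query ProxySQL's admin schema on a proxysql pod with the rotated password.
    run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -ptest-password' some-name-proxysql-0 proxysql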
-s /tmp/tmp.Y8WkxBbrUi/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-2.sql /tmp/tmp.Y8WkxBbrUi/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.oPMFS9y4R9 ++ mktemp + local LAST_ERR=/tmp/tmp.EVyPhKNm6o + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.oPMFS9y4R9 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.EVyPhKNm6o + rm /tmp/tmp.oPMFS9y4R9 /tmp/tmp.EVyPhKNm6o + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.jr5R9ATcIK ++ mktemp + local LAST_ERR=/tmp/tmp.WxUeQph6E0 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jr5R9ATcIK secret/my-cluster-secrets patched + cat /tmp/tmp.WxUeQph6E0 + rm /tmp/tmp.jr5R9ATcIK /tmp/tmp.WxUeQph6E0 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.35LVghtBq4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.04Tk1FigqJ ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.35LVghtBq4 ++ cat /tmp/tmp.04Tk1FigqJ ++ rm /tmp/tmp.35LVghtBq4 /tmp/tmp.04Tk1FigqJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6AQVgFva6K +++ mktemp ++ local LAST_ERR=/tmp/tmp.AJRHS8Dh89 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6AQVgFva6K ++ cat /tmp/tmp.AJRHS8Dh89 ++ rm /tmp/tmp.6AQVgFva6K /tmp/tmp.AJRHS8Dh89 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
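Annotation: the "test xtrabackup" step scales ProxySQL back down with a JSON merge patch on the pxc custom resource, rotates the xtrabackup password the same way as before, and then waits for the operator to reconcile 3 PXC pods and 2 ProxySQL pods. The equivalent standalone commands, taken from the trace and reusing the helpers sketched above:

    # Sketch: change the ProxySQL replica count on the cluster spec (merge patch, as in the trace).
    kubectl patch pxc some-name --type=merge -p '{"spec":{"proxysql":{"size":2}}}'

    # Rotate the xtrabackup password, then wait for 3 ready PXC and 2 ready ProxySQL members.
    patch_secret my-cluster-secrets xtrabackup "$(echo -n test-password | base64)"
    sleep 15
    wait_cluster_consistency some-name 3 2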
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6dL5oMo3gb +++ mktemp ++ local LAST_ERR=/tmp/tmp.NiDnOp632U ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6dL5oMo3gb ++ cat /tmp/tmp.NiDnOp632U ++ rm /tmp/tmp.6dL5oMo3gb /tmp/tmp.NiDnOp632U ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6IVpLuYXVC +++ mktemp ++ local LAST_ERR=/tmp/tmp.plQXh3hVzR ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6IVpLuYXVC ++ cat /tmp/tmp.plQXh3hVzR ++ rm /tmp/tmp.6IVpLuYXVC /tmp/tmp.plQXh3hVzR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YqIYMGznIf +++ mktemp ++ local LAST_ERR=/tmp/tmp.JFo6yRro6R ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YqIYMGznIf ++ cat /tmp/tmp.JFo6yRro6R ++ rm /tmp/tmp.YqIYMGznIf /tmp/tmp.JFo6yRro6R ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JAZg1Lf7DK +++ mktemp ++ local LAST_ERR=/tmp/tmp.gepehxF92E ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JAZg1Lf7DK ++ cat /tmp/tmp.gepehxF92E ++ rm /tmp/tmp.JAZg1Lf7DK /tmp/tmp.gepehxF92E ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GCmx5IK5OG +++ mktemp ++ local LAST_ERR=/tmp/tmp.TCK9yDPphT ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GCmx5IK5OG ++ cat /tmp/tmp.TCK9yDPphT ++ rm /tmp/tmp.GCmx5IK5OG /tmp/tmp.TCK9yDPphT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vlsmxUux5P +++ mktemp ++ local LAST_ERR=/tmp/tmp.PhcBfI67zC ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vlsmxUux5P ++ cat /tmp/tmp.PhcBfI67zC ++ rm /tmp/tmp.vlsmxUux5P /tmp/tmp.PhcBfI67zC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hlxPANUolt +++ mktemp ++ local LAST_ERR=/tmp/tmp.imyLzpcs6t ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hlxPANUolt ++ cat /tmp/tmp.imyLzpcs6t ++ rm /tmp/tmp.hlxPANUolt /tmp/tmp.imyLzpcs6t ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WXAvx7GyXc +++ mktemp ++ local LAST_ERR=/tmp/tmp.C1I4ORoJNm ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WXAvx7GyXc ++ cat /tmp/tmp.C1I4ORoJNm ++ rm /tmp/tmp.WXAvx7GyXc /tmp/tmp.C1I4ORoJNm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CgrWCrvdKI +++ mktemp ++ local LAST_ERR=/tmp/tmp.NIqXbMyYNV ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CgrWCrvdKI ++ cat /tmp/tmp.NIqXbMyYNV ++ rm /tmp/tmp.CgrWCrvdKI /tmp/tmp.NIqXbMyYNV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8sRnLluxik +++ mktemp ++ local LAST_ERR=/tmp/tmp.s2dwpoGxyc ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8sRnLluxik ++ cat /tmp/tmp.s2dwpoGxyc ++ rm /tmp/tmp.8sRnLluxik /tmp/tmp.s2dwpoGxyc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ozgK3hv6Sc +++ mktemp ++ local LAST_ERR=/tmp/tmp.ANXFCrgluu ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ozgK3hv6Sc ++ cat /tmp/tmp.ANXFCrgluu ++ rm /tmp/tmp.ozgK3hv6Sc /tmp/tmp.ANXFCrgluu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.f1pv3jzHGQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.UqLxrdmRF4 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.f1pv3jzHGQ ++ cat /tmp/tmp.UqLxrdmRF4 ++ rm /tmp/tmp.f1pv3jzHGQ /tmp/tmp.UqLxrdmRF4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
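Annotation: every kubectl call in the polling iterations around this point goes through the kubectl_bin wrapper, which is why each probe allocates a LAST_OUT/LAST_ERR pair with mktemp and loops over seq 0 2. A condensed sketch of what that wrapper appears to do based on the trace (retry up to three times, replay captured stdout/stderr, clean up the temp files); the back-off between attempts is an assumption:

    # Sketch: run kubectl up to 3 times, capturing stdout/stderr to temp files as the trace shows.
    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"${LAST_OUT}" 2>"${LAST_ERR}"
            exit_status=$?
            set -e
            if [ "${exit_status}" != 0 ]; then
                sleep 1   # assumption: brief pause before the next attempt
                continue
            fi
            break
        done
        cat "${LAST_OUT}"
        cat "${LAST_ERR}" >&2
        rm "${LAST_OUT}" "${LAST_ERR}"
        return ${exit_status}
    }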
.+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Yk2jSj87JH +++ mktemp ++ local LAST_ERR=/tmp/tmp.KkCRV78gIa ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Yk2jSj87JH ++ cat /tmp/tmp.KkCRV78gIa ++ rm /tmp/tmp.Yk2jSj87JH /tmp/tmp.KkCRV78gIa ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gv8IsoLMv3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.nzLwxqdVHN ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gv8IsoLMv3 ++ cat /tmp/tmp.nzLwxqdVHN ++ rm /tmp/tmp.gv8IsoLMv3 /tmp/tmp.nzLwxqdVHN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dGz30hPr4n +++ mktemp ++ local LAST_ERR=/tmp/tmp.3AdmhkFtiG ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dGz30hPr4n ++ cat /tmp/tmp.3AdmhkFtiG ++ rm /tmp/tmp.dGz30hPr4n /tmp/tmp.3AdmhkFtiG ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Uitxi9OVzH +++ mktemp ++ local LAST_ERR=/tmp/tmp.QSvpuWSyiv ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Uitxi9OVzH ++ cat /tmp/tmp.QSvpuWSyiv ++ rm /tmp/tmp.Uitxi9OVzH /tmp/tmp.QSvpuWSyiv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Eep2duuOJF +++ mktemp ++ local LAST_ERR=/tmp/tmp.i1tObVWIrE ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Eep2duuOJF ++ cat /tmp/tmp.i1tObVWIrE ++ rm /tmp/tmp.Eep2duuOJF /tmp/tmp.i1tObVWIrE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6hmWSkEOmA +++ mktemp ++ local LAST_ERR=/tmp/tmp.IPDo9UVPLM ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6hmWSkEOmA ++ cat /tmp/tmp.IPDo9UVPLM ++ rm /tmp/tmp.6hmWSkEOmA /tmp/tmp.IPDo9UVPLM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.M8sslTBp5e +++ mktemp ++ local LAST_ERR=/tmp/tmp.2nQFn4Se3C ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.M8sslTBp5e ++ cat /tmp/tmp.2nQFn4Se3C ++ rm /tmp/tmp.M8sslTBp5e /tmp/tmp.2nQFn4Se3C ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8qsSmIsObm +++ mktemp ++ local LAST_ERR=/tmp/tmp.5RBCW1zXzA ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8qsSmIsObm ++ cat /tmp/tmp.5RBCW1zXzA ++ rm /tmp/tmp.8qsSmIsObm /tmp/tmp.5RBCW1zXzA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FLyRbIXn3f +++ mktemp ++ local LAST_ERR=/tmp/tmp.cQGiuMtjYj ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FLyRbIXn3f ++ cat /tmp/tmp.cQGiuMtjYj ++ rm /tmp/tmp.FLyRbIXn3f /tmp/tmp.cQGiuMtjYj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 22 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gmmFjSRxq6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.0m5msSC8Of ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gmmFjSRxq6 ++ cat /tmp/tmp.0m5msSC8Of ++ rm /tmp/tmp.gmmFjSRxq6 /tmp/tmp.0m5msSC8Of ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5Vw2JGRTus +++ mktemp ++ local LAST_ERR=/tmp/tmp.f6MvqmMF7r ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5Vw2JGRTus ++ cat /tmp/tmp.f6MvqmMF7r ++ rm /tmp/tmp.5Vw2JGRTus /tmp/tmp.f6MvqmMF7r ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.TyHvYnIUzL ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.WgbvHpiTQ7 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.TyHvYnIUzL +++++ cat /tmp/tmp.WgbvHpiTQ7 +++++ rm /tmp/tmp.TyHvYnIUzL /tmp/tmp.WgbvHpiTQ7 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.cR0m1JsAQI ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.xY1H74C6ml +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in 
$(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.cR0m1JsAQI +++++ cat /tmp/tmp.xY1H74C6ml +++++ rm /tmp/tmp.cR0m1JsAQI /tmp/tmp.xY1H74C6ml +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.s8DxfOXzpx +++ mktemp ++ local LAST_ERR=/tmp/tmp.q6SxNOKj5T ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.s8DxfOXzpx ++ cat /tmp/tmp.q6SxNOKj5T ++ rm /tmp/tmp.s8DxfOXzpx /tmp/tmp.q6SxNOKj5T ++ return 0 + [[ 2 == \2 ]] + echo + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-3-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace egrep: warning: egrep is obsolescent; using grep -E + '[' '!' 
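Annotation: the readiness probes above repeatedly resolve which proxy fronts the cluster by reading .spec.haproxy.enabled and .spec.proxysql.enabled from the custom resource; with HAProxy disabled and ProxySQL enabled they settle on the some-name-proxysql service and the proxysql ready counter. A sketch of that resolution using the helper names from the trace; the haproxy branch's service name is inferred, not shown in this log:

    # Sketch: decide whether the cluster fronts traffic with HAProxy or ProxySQL.
    get_proxy() {
        local target_cluster=$1
        if [[ $(kubectl get pxc "${target_cluster}" -o 'jsonpath={.spec.haproxy.enabled}') == "true" ]]; then
            echo "${target_cluster}-haproxy"
        elif [[ $(kubectl get pxc "${target_cluster}" -o 'jsonpath={.spec.proxysql.enabled}') == "true" ]]; then
            echo "${target_cluster}-proxysql"
        fi
    }

    get_proxy_engine() {
        local cluster_proxy
        cluster_proxy=$(get_proxy "$1")
        echo "${cluster_proxy##*-}"   # "haproxy" or "proxysql"
    }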
-s /tmp/tmp.Y8WkxBbrUi/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-3.sql /tmp/tmp.Y8WkxBbrUi/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Y4FrIrT4Z2 ++ mktemp + local LAST_ERR=/tmp/tmp.F0noSsia6r + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Y4FrIrT4Z2 secret/my-cluster-secrets patched + cat /tmp/tmp.F0noSsia6r + rm /tmp/tmp.Y4FrIrT4Z2 /tmp/tmp.F0noSsia6r + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.KJN0lTT9mA +++ mktemp ++ local LAST_ERR=/tmp/tmp.NGb22LM6g4 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KJN0lTT9mA ++ cat /tmp/tmp.NGb22LM6g4 ++ rm /tmp/tmp.KJN0lTT9mA /tmp/tmp.NGb22LM6g4 ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + retry=0 + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep additional_password + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0wtWP1rijt +++ mktemp ++ local LAST_ERR=/tmp/tmp.G085iaNtDx ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0wtWP1rijt ++ cat /tmp/tmp.G085iaNtDx ++ rm /tmp/tmp.0wtWP1rijt /tmp/tmp.G085iaNtDx ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace {"additional_password": "*C1F414D9BAF378B656A849B31F9F8AF3125F558B"} + retry=0 + 
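Annotation: the "test monitor" step relies on MySQL 8.0 dual passwords: after the operator rotates the monitor password with RETAIN CURRENT PASSWORD, mysql.user.User_attributes carries an additional_password hash (visible above), and only once the old credential has been discarded does the attribute go back to NULL. wait_for_password_propagation therefore polls with the two predicates below until the rotation has fully settled. A sketch with the query and grep targets taken from the trace; the loop shape is condensed:

    # Sketch: dual-password checks used while waiting for the monitor password to propagate.
    user_attributes() {
        local username=$1 uri=$2
        run_mysql "SELECT User_attributes FROM mysql.user WHERE user='${username}'" "${uri}"
    }

    is_password_updated() {        # new password staged: additional_password hash is present
        user_attributes "$@" | grep additional_password
    }

    is_old_password_discarded() {  # rotation complete: the attribute is back to NULL
        user_attributes "$@" | grep NULL
    }

    retry=0
    until is_old_password_discarded monitor '-h some-name-pxc -uroot -ptest-password'; do
        echo 'waiting for password propagation'
        sleep 1
        retry=$((retry + 1))
        [ "${retry}" -ge 240 ] && exit 1
    done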
is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.k1iTkFsWbF +++ mktemp ++ local LAST_ERR=/tmp/tmp.boIwo5Lnh2 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.k1iTkFsWbF ++ cat /tmp/tmp.boIwo5Lnh2 ++ rm /tmp/tmp.k1iTkFsWbF /tmp/tmp.boIwo5Lnh2 ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 1 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.32k6mECxZA +++ mktemp ++ local LAST_ERR=/tmp/tmp.bA5qsEnhkF ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.32k6mECxZA ++ cat /tmp/tmp.bA5qsEnhkF ++ rm /tmp/tmp.32k6mECxZA /tmp/tmp.bA5qsEnhkF ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 2 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h 
some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tSoFUdjjaF +++ mktemp ++ local LAST_ERR=/tmp/tmp.UosCuUuFNC ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tSoFUdjjaF ++ cat /tmp/tmp.UosCuUuFNC ++ rm /tmp/tmp.tSoFUdjjaF /tmp/tmp.UosCuUuFNC ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 3 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e6HKMyd5A5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.7VEbKlYKZK ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.e6HKMyd5A5 ++ cat /tmp/tmp.7VEbKlYKZK ++ rm /tmp/tmp.e6HKMyd5A5 /tmp/tmp.7VEbKlYKZK ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 4 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods 
--selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MSAYMQaG8Z +++ mktemp ++ local LAST_ERR=/tmp/tmp.HWVa03bJ5o ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MSAYMQaG8Z ++ cat /tmp/tmp.HWVa03bJ5o ++ rm /tmp/tmp.MSAYMQaG8Z /tmp/tmp.HWVa03bJ5o ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 5 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RmnImo5Z8H +++ mktemp ++ local LAST_ERR=/tmp/tmp.G1ObexEHF1 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RmnImo5Z8H ++ cat /tmp/tmp.G1ObexEHF1 ++ rm /tmp/tmp.RmnImo5Z8H /tmp/tmp.G1ObexEHF1 ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 6 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qEVbqwQ4cV +++ mktemp ++ local LAST_ERR=/tmp/tmp.Uiy1FSghSv ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods 
--selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qEVbqwQ4cV ++ cat /tmp/tmp.Uiy1FSghSv ++ rm /tmp/tmp.qEVbqwQ4cV /tmp/tmp.Uiy1FSghSv ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 7 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9embfSM3kG +++ mktemp ++ local LAST_ERR=/tmp/tmp.hUFomVEw75 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9embfSM3kG ++ cat /tmp/tmp.hUFomVEw75 ++ rm /tmp/tmp.9embfSM3kG /tmp/tmp.hUFomVEw75 ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 8 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.k261DTTESk +++ mktemp ++ local LAST_ERR=/tmp/tmp.2eWijltmc7 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.k261DTTESk ++ cat /tmp/tmp.2eWijltmc7 ++ rm /tmp/tmp.k261DTTESk /tmp/tmp.2eWijltmc7 ++ return 0 + 
client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 9 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0XIGjc1J6m +++ mktemp ++ local LAST_ERR=/tmp/tmp.gy1Noj2jn2 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0XIGjc1J6m ++ cat /tmp/tmp.gy1Noj2jn2 ++ rm /tmp/tmp.0XIGjc1J6m /tmp/tmp.gy1Noj2jn2 ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 10 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XmiP4I1sI2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ylffjjd3SB ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XmiP4I1sI2 ++ cat /tmp/tmp.Ylffjjd3SB ++ rm /tmp/tmp.XmiP4I1sI2 /tmp/tmp.Ylffjjd3SB ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep 
'^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 11 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZfKSaswr33 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ykKDnXNin7 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZfKSaswr33 ++ cat /tmp/tmp.ykKDnXNin7 ++ rm /tmp/tmp.ZfKSaswr33 /tmp/tmp.ykKDnXNin7 ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 12 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bjeRS1SGBl +++ mktemp ++ local LAST_ERR=/tmp/tmp.L6Z9Z3Uw8K ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bjeRS1SGBl ++ cat /tmp/tmp.L6Z9Z3Uw8K ++ rm /tmp/tmp.bjeRS1SGBl /tmp/tmp.L6Z9Z3Uw8K ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: 
pxc-client, backup .Ok + set +o xtrace NULL + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zk8s7lxA8c +++ mktemp ++ local LAST_ERR=/tmp/tmp.0asx87WuWy ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zk8s7lxA8c ++ cat /tmp/tmp.0asx87WuWy ++ rm /tmp/tmp.zk8s7lxA8c /tmp/tmp.0asx87WuWy ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nZ1E1WdoPn +++ mktemp ++ local LAST_ERR=/tmp/tmp.e63srZOcvL ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nZ1E1WdoPn ++ cat /tmp/tmp.e63srZOcvL ++ rm /tmp/tmp.nZ1E1WdoPn /tmp/tmp.e63srZOcvL ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.32eCYaWZpq ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.JSOkONdhaI +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.32eCYaWZpq +++++ cat /tmp/tmp.JSOkONdhaI +++++ rm /tmp/tmp.32eCYaWZpq /tmp/tmp.JSOkONdhaI +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.PvAEgILoOt ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Nmp47XYtyf +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.PvAEgILoOt +++++ cat /tmp/tmp.Nmp47XYtyf +++++ rm /tmp/tmp.PvAEgILoOt /tmp/tmp.Nmp47XYtyf +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cMEbW2sSab +++ mktemp ++ local LAST_ERR=/tmp/tmp.zQvxnyffIG ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cMEbW2sSab ++ cat /tmp/tmp.zQvxnyffIG ++ rm /tmp/tmp.cMEbW2sSab /tmp/tmp.zQvxnyffIG ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql 
-umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YlcODlpaci +++ mktemp ++ local LAST_ERR=/tmp/tmp.CpC47UjmOe ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YlcODlpaci ++ cat /tmp/tmp.CpC47UjmOe ++ rm /tmp/tmp.YlcODlpaci /tmp/tmp.CpC47UjmOe ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.Y8WkxBbrUi/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.Y8WkxBbrUi/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.iIlALlde9Y ++ mktemp + local LAST_ERR=/tmp/tmp.rJE4xFUXbT + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.iIlALlde9Y secret/my-cluster-secrets patched + cat /tmp/tmp.rJE4xFUXbT + rm /tmp/tmp.iIlALlde9Y /tmp/tmp.rJE4xFUXbT + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SF1AfrwQcH +++ mktemp ++ local LAST_ERR=/tmp/tmp.pJDQrFk4lq ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SF1AfrwQcH ++ cat /tmp/tmp.pJDQrFk4lq ++ rm /tmp/tmp.SF1AfrwQcH /tmp/tmp.pJDQrFk4lq ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gBOw522KAH +++ mktemp ++ local LAST_ERR=/tmp/tmp.G3PjuvFvtG ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gBOw522KAH ++ cat /tmp/tmp.G3PjuvFvtG ++ rm /tmp/tmp.gBOw522KAH /tmp/tmp.G3PjuvFvtG ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.qVFdTU4zcF ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.5KRGrweLle +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.qVFdTU4zcF +++++ cat /tmp/tmp.5KRGrweLle +++++ rm /tmp/tmp.qVFdTU4zcF /tmp/tmp.5KRGrweLle +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.eThrNXWvCr ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.7tnQhVr4iK +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 
'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.eThrNXWvCr +++++ cat /tmp/tmp.7tnQhVr4iK +++++ rm /tmp/tmp.eThrNXWvCr /tmp/tmp.7tnQhVr4iK +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.E2feUq45es +++ mktemp ++ local LAST_ERR=/tmp/tmp.sTII6z6qLr ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.E2feUq45es ++ cat /tmp/tmp.sTII6z6qLr ++ rm /tmp/tmp.E2feUq45es /tmp/tmp.sTII6z6qLr ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KiqAbm0Kl1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.tRax9APEwJ ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KiqAbm0Kl1 ++ cat /tmp/tmp.tRax9APEwJ ++ rm /tmp/tmp.KiqAbm0Kl1 /tmp/tmp.tRax9APEwJ ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.Y8WkxBbrUi/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.Y8WkxBbrUi/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.kQI8UwpVBJ ++ mktemp + local LAST_ERR=/tmp/tmp.O5V79peXt0 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.kQI8UwpVBJ perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.O5V79peXt0 + rm /tmp/tmp.kQI8UwpVBJ /tmp/tmp.O5V79peXt0 + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SlBdShV6T4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.DplIRNBaGl ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SlBdShV6T4 ++ cat /tmp/tmp.DplIRNBaGl ++ rm /tmp/tmp.SlBdShV6T4 /tmp/tmp.DplIRNBaGl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.R68QGrLPJs +++ mktemp ++ local LAST_ERR=/tmp/tmp.0JXKtO4Cj0 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.R68QGrLPJs ++ cat /tmp/tmp.0JXKtO4Cj0 ++ rm /tmp/tmp.R68QGrLPJs /tmp/tmp.0JXKtO4Cj0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2tDS6HMvr6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.8cCwOg3BhF ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2tDS6HMvr6 ++ cat /tmp/tmp.8cCwOg3BhF ++ rm /tmp/tmp.2tDS6HMvr6 /tmp/tmp.8cCwOg3BhF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
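After the secretsName switch the cluster drops back to 'initializing', and the repeated .status.state reads above are wait_cluster_consistency polling it back to 'ready'. A minimal sketch of that loop, reconstructed from this trace (the real helper in e2e-tests/functions also resolves haproxy vs proxysql via get_proxy_engine and routes every call through the kubectl_bin retry wrapper):

    wait_cluster_consistency() {
        # wait_cluster_consistency <cluster> <pxc-size> <proxy-size>
        local cluster_name=$1 cluster_size=$2 proxy_size=$3
        local i=0 max=300
        sleep 7
        echo -n "waiting for pxc/${cluster_name} to be ready"
        # poll the CR status every 5s, up to $max attempts
        until [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.state}') == "ready" ]]; do
            echo -n .
            sleep 5
            [[ $i -ge $max ]] && return 1
            i=$((i + 1))
        done
        # once ready, the ready counters must match the requested sizes
        [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.pxc.ready}') == "$cluster_size" ]] &&
        [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.proxysql.ready}') == "$proxy_size" ]]
    }

In this run the state stays at 'initializing' for about twenty polls (roughly 100 seconds) after the secret swap before the counters are checked.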
.+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TPUaS0YqHy +++ mktemp ++ local LAST_ERR=/tmp/tmp.gpNVvQWb44 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TPUaS0YqHy ++ cat /tmp/tmp.gpNVvQWb44 ++ rm /tmp/tmp.TPUaS0YqHy /tmp/tmp.gpNVvQWb44 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9NHib6JJLQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.DG94PfxN8j ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9NHib6JJLQ ++ cat /tmp/tmp.DG94PfxN8j ++ rm /tmp/tmp.9NHib6JJLQ /tmp/tmp.DG94PfxN8j ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5QxK0m19FM +++ mktemp ++ local LAST_ERR=/tmp/tmp.6fK6EJLiMw ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5QxK0m19FM ++ cat /tmp/tmp.6fK6EJLiMw ++ rm /tmp/tmp.5QxK0m19FM /tmp/tmp.6fK6EJLiMw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MXtFUBERYy +++ mktemp ++ local LAST_ERR=/tmp/tmp.CVER1JBpV6 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MXtFUBERYy ++ cat /tmp/tmp.CVER1JBpV6 ++ rm /tmp/tmp.MXtFUBERYy /tmp/tmp.CVER1JBpV6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HpDwy7cJ1L +++ mktemp ++ local LAST_ERR=/tmp/tmp.180egWlkp1 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HpDwy7cJ1L ++ cat /tmp/tmp.180egWlkp1 ++ rm /tmp/tmp.HpDwy7cJ1L /tmp/tmp.180egWlkp1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AmTn0Z1V2h +++ mktemp ++ local LAST_ERR=/tmp/tmp.JYbcnw0ABe ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AmTn0Z1V2h ++ cat /tmp/tmp.JYbcnw0ABe ++ rm /tmp/tmp.AmTn0Z1V2h /tmp/tmp.JYbcnw0ABe ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7FHz4qCCPi +++ mktemp ++ local LAST_ERR=/tmp/tmp.KjKKRjF3it ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7FHz4qCCPi ++ cat /tmp/tmp.KjKKRjF3it ++ rm /tmp/tmp.7FHz4qCCPi /tmp/tmp.KjKKRjF3it ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Nf5V9A0y45 +++ mktemp ++ local LAST_ERR=/tmp/tmp.bHyTIMwzWC ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Nf5V9A0y45 ++ cat /tmp/tmp.bHyTIMwzWC ++ rm /tmp/tmp.Nf5V9A0y45 /tmp/tmp.bHyTIMwzWC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YVyPgxLb6j +++ mktemp ++ local LAST_ERR=/tmp/tmp.8pW1f3ByD2 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YVyPgxLb6j ++ cat /tmp/tmp.8pW1f3ByD2 ++ rm /tmp/tmp.YVyPgxLb6j /tmp/tmp.8pW1f3ByD2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.35u817MHj0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.aa7aKxm5Lk ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.35u817MHj0 ++ cat /tmp/tmp.aa7aKxm5Lk ++ rm /tmp/tmp.35u817MHj0 /tmp/tmp.aa7aKxm5Lk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SpAICqf9Hm +++ mktemp ++ local LAST_ERR=/tmp/tmp.qxclJpiyJ6 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SpAICqf9Hm ++ cat /tmp/tmp.qxclJpiyJ6 ++ rm /tmp/tmp.SpAICqf9Hm /tmp/tmp.qxclJpiyJ6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vmwM9tRWG5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.5tr2shNy5a ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vmwM9tRWG5 ++ cat /tmp/tmp.5tr2shNy5a ++ rm /tmp/tmp.vmwM9tRWG5 /tmp/tmp.5tr2shNy5a ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VjnqZ1Vk1D +++ mktemp ++ local LAST_ERR=/tmp/tmp.5PaXQCbIhA ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VjnqZ1Vk1D ++ cat /tmp/tmp.5PaXQCbIhA ++ rm /tmp/tmp.VjnqZ1Vk1D /tmp/tmp.5PaXQCbIhA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hFu7lOkpP8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.3V1NED5dVT ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hFu7lOkpP8 ++ cat /tmp/tmp.3V1NED5dVT ++ rm /tmp/tmp.hFu7lOkpP8 /tmp/tmp.3V1NED5dVT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RYSq3SchOA +++ mktemp ++ local LAST_ERR=/tmp/tmp.9xWgR9HqXO ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RYSq3SchOA ++ cat /tmp/tmp.9xWgR9HqXO ++ rm /tmp/tmp.RYSq3SchOA /tmp/tmp.9xWgR9HqXO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.I6ToxEphyM +++ mktemp ++ local LAST_ERR=/tmp/tmp.0exFEXBOWE ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.I6ToxEphyM ++ cat /tmp/tmp.0exFEXBOWE ++ rm /tmp/tmp.I6ToxEphyM /tmp/tmp.0exFEXBOWE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RmgQZIe1oE +++ mktemp ++ local LAST_ERR=/tmp/tmp.yp9j4SQX5t ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RmgQZIe1oE ++ cat /tmp/tmp.yp9j4SQX5t ++ rm /tmp/tmp.RmgQZIe1oE /tmp/tmp.yp9j4SQX5t ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.z3GM21pUOt +++ mktemp ++ local LAST_ERR=/tmp/tmp.gBvbLmFFD6 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.z3GM21pUOt ++ cat /tmp/tmp.gBvbLmFFD6 ++ rm /tmp/tmp.z3GM21pUOt /tmp/tmp.gBvbLmFFD6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
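Every kubectl call in this trace runs through the kubectl_bin wrapper, which is why each step carries the same mktemp / LAST_OUT / LAST_ERR / seq 0 2 boilerplate: output is captured to temp files and the command gets up to three attempts. A rough reconstruction from the pattern visible here (the failure branch never fires in this run, so its exact behaviour is an assumption):

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0 i
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do                # up to 3 attempts
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 ]; then
                continue                       # assumed: retry on failure (never exercised above)
            fi
            break
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2                    # assumed: captured errors are echoed back
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }

The depth of the '+' markers in the trace shows how many layers of this wrapper and its command substitutions are active at any point.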
.+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QZ3oCzR5Cj +++ mktemp ++ local LAST_ERR=/tmp/tmp.wO4skozQ0M ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QZ3oCzR5Cj ++ cat /tmp/tmp.wO4skozQ0M ++ rm /tmp/tmp.QZ3oCzR5Cj /tmp/tmp.wO4skozQ0M ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Xd4YaVmfEQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.A1GMCLrk9O ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Xd4YaVmfEQ ++ cat /tmp/tmp.A1GMCLrk9O ++ rm /tmp/tmp.Xd4YaVmfEQ /tmp/tmp.A1GMCLrk9O ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.EGrtEAKAS2 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ZFJYyqkodz +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.EGrtEAKAS2 +++++ cat /tmp/tmp.ZFJYyqkodz +++++ rm /tmp/tmp.EGrtEAKAS2 /tmp/tmp.ZFJYyqkodz +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.bdyz56TJfY ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.eHqBY5D4ga +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.bdyz56TJfY +++++ cat /tmp/tmp.eHqBY5D4ga +++++ rm /tmp/tmp.bdyz56TJfY /tmp/tmp.eHqBY5D4ga +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bt8YanonvL +++ mktemp ++ local LAST_ERR=/tmp/tmp.VtiDB5V9ar ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bt8YanonvL ++ cat /tmp/tmp.VtiDB5V9ar ++ rm /tmp/tmp.bt8YanonvL /tmp/tmp.VtiDB5V9ar ++ return 0 + [[ 2 == \2 ]] + echo + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.fbXbsWMNRV ++ mktemp + local LAST_ERR=/tmp/tmp.ZpErTUSuQu + local exit_status=0 ++ seq 0 2 + 
for i in $(seq 0 2) + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.fbXbsWMNRV secret/my-cluster-secrets-2 patched + cat /tmp/tmp.ZpErTUSuQu + rm /tmp/tmp.fbXbsWMNRV /tmp/tmp.ZpErTUSuQu + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EaY8y2yjve +++ mktemp ++ local LAST_ERR=/tmp/tmp.O2MXKWUvGF ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EaY8y2yjve ++ cat /tmp/tmp.O2MXKWUvGF ++ rm /tmp/tmp.EaY8y2yjve /tmp/tmp.O2MXKWUvGF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LXbRCBBlG0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ujwPHhUshr ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LXbRCBBlG0 ++ cat /tmp/tmp.ujwPHhUshr ++ rm /tmp/tmp.LXbRCBBlG0 /tmp/tmp.ujwPHhUshr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oF3lsBedcN +++ mktemp ++ local LAST_ERR=/tmp/tmp.ia2rwOHGYD ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oF3lsBedcN ++ cat /tmp/tmp.ia2rwOHGYD ++ rm /tmp/tmp.oF3lsBedcN /tmp/tmp.ia2rwOHGYD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Jl3BSiRVUs +++ mktemp ++ local LAST_ERR=/tmp/tmp.W9eqBP3S6m ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Jl3BSiRVUs ++ cat /tmp/tmp.W9eqBP3S6m ++ rm /tmp/tmp.Jl3BSiRVUs /tmp/tmp.W9eqBP3S6m ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SVabkyE69u +++ mktemp ++ local LAST_ERR=/tmp/tmp.j2Xwakxe9w ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SVabkyE69u ++ cat /tmp/tmp.j2Xwakxe9w ++ rm /tmp/tmp.SVabkyE69u /tmp/tmp.j2Xwakxe9w ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
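The 'test new operator' step above rotates the operator password in the new secret: the plaintext is base64-encoded and merged into my-cluster-secrets-2, then the cluster is polled back to ready. patch_secret is a thin wrapper around kubectl patch; a sketch matching the commands in the trace:

    patch_secret() {
        # patch_secret <secret> <key> <base64-value>
        local secret=$1 key=$2 value=$3
        kubectl patch secret "$secret" -p="{\"data\":{\"${key}\": \"${value}\"}}"
    }

    newpass=test-password2
    newpassencrypted=$(echo -n "$newpass" | base64)   # dGVzdC1wYXNzd29yZDI=
    patch_secret my-cluster-secrets-2 operator "$newpassencrypted"

The later getSecretData read of internal-some-name shows the operator copied the new value into its internal secret.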
.+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Zu3fFLI3Ft +++ mktemp ++ local LAST_ERR=/tmp/tmp.HswNfkEqGX ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Zu3fFLI3Ft ++ cat /tmp/tmp.HswNfkEqGX ++ rm /tmp/tmp.Zu3fFLI3Ft /tmp/tmp.HswNfkEqGX ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HqVXwiSn5l +++ mktemp ++ local LAST_ERR=/tmp/tmp.SSkrr80fXY ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HqVXwiSn5l ++ cat /tmp/tmp.SSkrr80fXY ++ rm /tmp/tmp.HqVXwiSn5l /tmp/tmp.SSkrr80fXY ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.rzl2PtJWSj ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.cUvOIlS7di +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.rzl2PtJWSj +++++ cat /tmp/tmp.cUvOIlS7di +++++ rm /tmp/tmp.rzl2PtJWSj /tmp/tmp.cUvOIlS7di +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ba2HJXUPYZ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.RowAoOcOuS +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ba2HJXUPYZ +++++ cat /tmp/tmp.RowAoOcOuS +++++ rm /tmp/tmp.ba2HJXUPYZ /tmp/tmp.RowAoOcOuS +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.f1BVfaVEaL +++ mktemp ++ local LAST_ERR=/tmp/tmp.KHd57O8cgm ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.f1BVfaVEaL ++ cat /tmp/tmp.KHd57O8cgm ++ rm /tmp/tmp.f1BVfaVEaL /tmp/tmp.KHd57O8cgm ++ return 0 + [[ 2 == \2 ]] + echo + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql ]] + 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FxkGJcZlwy +++ mktemp ++ local LAST_ERR=/tmp/tmp.7BpxtfUmsZ ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FxkGJcZlwy ++ cat /tmp/tmp.7BpxtfUmsZ ++ rm /tmp/tmp.FxkGJcZlwy /tmp/tmp.7BpxtfUmsZ ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.Y8WkxBbrUi/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.Y8WkxBbrUi/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.rkVgc8assp +++ mktemp ++ local LAST_ERR=/tmp/tmp.Mz1IpM9q66 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rkVgc8assp ++ cat /tmp/tmp.Mz1IpM9q66 ++ rm /tmp/tmp.rkVgc8assp /tmp/tmp.Mz1IpM9q66 ++ return 0 + newpass='h0?NHNxHZpCX[h}0}' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''h0?NHNxHZpCX[h}0}'\'';' '-h some-name-pxc -uroot -p'\''h0?NHNxHZpCX[h}0}'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''h0?NHNxHZpCX[h}0}'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''h0?NHNxHZpCX[h}0}'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.N4FwjvD4HO +++ mktemp ++ local LAST_ERR=/tmp/tmp.hEpJtMRVld ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.N4FwjvD4HO ++ cat /tmp/tmp.hEpJtMRVld ++ rm /tmp/tmp.N4FwjvD4HO /tmp/tmp.hEpJtMRVld ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is 
obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''h0?NHNxHZpCX[h}0}'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''h0?NHNxHZpCX[h}0}'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''h0?NHNxHZpCX[h}0}'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''h0?NHNxHZpCX[h}0}'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jRS3L9jnlu +++ mktemp ++ local LAST_ERR=/tmp/tmp.ECLC6KcV4T ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jRS3L9jnlu ++ cat /tmp/tmp.ECLC6KcV4T ++ rm /tmp/tmp.jRS3L9jnlu /tmp/tmp.ECLC6KcV4T ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.Y8WkxBbrUi/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.Y8WkxBbrUi/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.6EmfqD3Cak +++ mktemp ++ local LAST_ERR=/tmp/tmp.Hrq4zL6B6V ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6EmfqD3Cak ++ cat /tmp/tmp.Hrq4zL6B6V ++ rm /tmp/tmp.6EmfqD3Cak /tmp/tmp.Hrq4zL6B6V ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.UYBD2ARcZa ++ mktemp + local LAST_ERR=/tmp/tmp.rxZIyCsVws + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.UYBD2ARcZa secret/my-cluster-secrets-2 configured + cat /tmp/tmp.rxZIyCsVws Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
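Both password reads in this part of the test go through getSecretData, which extracts one key from a Secret with a Go template and base64-decodes it; above it returns the generated root password of my-cluster-secrets-2 ('h0?NHNxHZpCX[h}0}') used for the testsync user, and then the operator entry of the operator-managed internal-some-name secret (test-password2). Sketch, following the trace:

    getSecretData() {
        # getSecretData <secret-name> <data-key>
        local secretName=$1 dataKey=$2
        kubectl get "secrets/${secretName}" --template="{{.data.${dataKey}}}" | base64 --decode
    }

    newpass=$(getSecretData my-cluster-secrets-2 root)
    pass=$(getSecretData internal-some-name operator)

The 'check secret without operator' step then re-applies the stock secrets.yml over my-cluster-secrets-2 (per the step name, presumably without an operator entry) and re-runs the select-4 check with the old operator password, confirming the operator does not reset a key that is absent from the user secret.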
+ rm /tmp/tmp.UYBD2ARcZa /tmp/tmp.rxZIyCsVws + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WdA4RIs9RE +++ mktemp ++ local LAST_ERR=/tmp/tmp.FWcF8dE5WC ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WdA4RIs9RE ++ cat /tmp/tmp.FWcF8dE5WC ++ rm /tmp/tmp.WdA4RIs9RE /tmp/tmp.FWcF8dE5WC ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ egrep '^(pxc|proxysql)$' ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.Y8WkxBbrUi/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.Y8WkxBbrUi/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/conf/some-name.yml ++ mktemp + /usr/sbin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/sbin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/sbin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/sbin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2182-afafff88#' + /usr/sbin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/sbin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + local LAST_OUT=/tmp/tmp.fIFdWvCC1R ++ mktemp + /usr/sbin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/sbin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/sbin/sed -e s~minio-service.#namespace~minio-service.users-12601~ + /usr/sbin/sed -e 's#apply:.*#apply: Never#' + local LAST_ERR=/tmp/tmp.clTZWQTfRq + local exit_status=0 + /usr/sbin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.fIFdWvCC1R perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.clTZWQTfRq + rm /tmp/tmp.fIFdWvCC1R /tmp/tmp.clTZWQTfRq + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9UrXO3h4RV +++ mktemp ++ local LAST_ERR=/tmp/tmp.q42kpoN8Rc ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9UrXO3h4RV ++ cat /tmp/tmp.q42kpoN8Rc ++ rm /tmp/tmp.9UrXO3h4RV /tmp/tmp.q42kpoN8Rc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
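apply_config / cat_config, seen above, rewrite the stock manifest on the fly before applying it: a chain of sed expressions pins the apiVersion, swaps every image reference (pxc, init, pmm, backup, proxysql, haproxy, logcollector) to the build under test, fills the minio namespace placeholder, and forces apply: Never, and the result is piped into kubectl apply -f -. A condensed sketch with this run's values (the full substitution list is in the trace):

    apply_config() {
        local manifest=$1
        cat "$manifest" \
            | sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
            | sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' \
            | sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2182-afafff88#' \
            | sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' \
            | sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' \
            | sed -e 's#apply:.*#apply: Never#' \
            | kubectl apply -f -
    }

    apply_config e2e-tests/users/conf/some-name.yml

The reconfigured cluster is then waited on with wait_cluster_consistency some-name 3 3, i.e. three proxysql pods instead of two.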
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Hjv6tV6K9x +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ekdrj1TVur ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Hjv6tV6K9x ++ cat /tmp/tmp.Ekdrj1TVur ++ rm /tmp/tmp.Hjv6tV6K9x /tmp/tmp.Ekdrj1TVur ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tQ2rj18dcq +++ mktemp ++ local LAST_ERR=/tmp/tmp.qnAWvSTWhY ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tQ2rj18dcq ++ cat /tmp/tmp.qnAWvSTWhY ++ rm /tmp/tmp.tQ2rj18dcq /tmp/tmp.qnAWvSTWhY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ggZI6WiHVV +++ mktemp ++ local LAST_ERR=/tmp/tmp.jVLghZVNsM ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ggZI6WiHVV ++ cat /tmp/tmp.jVLghZVNsM ++ rm /tmp/tmp.ggZI6WiHVV /tmp/tmp.jVLghZVNsM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.B5kGxC400R +++ mktemp ++ local LAST_ERR=/tmp/tmp.IzmYiU0Qh0 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.B5kGxC400R ++ cat /tmp/tmp.IzmYiU0Qh0 ++ rm /tmp/tmp.B5kGxC400R /tmp/tmp.IzmYiU0Qh0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NNCNrtoW8u +++ mktemp ++ local LAST_ERR=/tmp/tmp.uFydggPXc1 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NNCNrtoW8u ++ cat /tmp/tmp.uFydggPXc1 ++ rm /tmp/tmp.NNCNrtoW8u /tmp/tmp.uFydggPXc1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.frzuvoAVRp +++ mktemp ++ local LAST_ERR=/tmp/tmp.8UfzyBS1uU ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.frzuvoAVRp ++ cat /tmp/tmp.8UfzyBS1uU ++ rm /tmp/tmp.frzuvoAVRp /tmp/tmp.8UfzyBS1uU ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2XFDGIf8bU +++ mktemp ++ local LAST_ERR=/tmp/tmp.zpIpkCjA6y ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2XFDGIf8bU ++ cat /tmp/tmp.zpIpkCjA6y ++ rm /tmp/tmp.2XFDGIf8bU /tmp/tmp.zpIpkCjA6y ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9La6Ra3Cwz +++ mktemp ++ local LAST_ERR=/tmp/tmp.RhCbPkL8kK ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9La6Ra3Cwz ++ cat /tmp/tmp.RhCbPkL8kK ++ rm /tmp/tmp.9La6Ra3Cwz /tmp/tmp.RhCbPkL8kK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YFnDLyCP1i +++ mktemp ++ local LAST_ERR=/tmp/tmp.kAuvcI6dlb ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YFnDLyCP1i ++ cat /tmp/tmp.kAuvcI6dlb ++ rm /tmp/tmp.YFnDLyCP1i /tmp/tmp.kAuvcI6dlb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4Mn0q1hII0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.u8ka5Uit3N ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4Mn0q1hII0 ++ cat /tmp/tmp.u8ka5Uit3N ++ rm /tmp/tmp.4Mn0q1hII0 /tmp/tmp.u8ka5Uit3N ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mHB6IuHPDW +++ mktemp ++ local LAST_ERR=/tmp/tmp.xOH3bv2CKJ ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mHB6IuHPDW ++ cat /tmp/tmp.xOH3bv2CKJ ++ rm /tmp/tmp.mHB6IuHPDW /tmp/tmp.xOH3bv2CKJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RLDG8Xx4i6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.9KixGF80E0 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RLDG8Xx4i6 ++ cat /tmp/tmp.9KixGF80E0 ++ rm /tmp/tmp.RLDG8Xx4i6 /tmp/tmp.9KixGF80E0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lylHmpzQrY +++ mktemp ++ local LAST_ERR=/tmp/tmp.Zn0gNj5gfu ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lylHmpzQrY ++ cat /tmp/tmp.Zn0gNj5gfu ++ rm /tmp/tmp.lylHmpzQrY /tmp/tmp.Zn0gNj5gfu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dghtrQaSHp +++ mktemp ++ local LAST_ERR=/tmp/tmp.jDVJHqpfCU ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dghtrQaSHp ++ cat /tmp/tmp.jDVJHqpfCU ++ rm /tmp/tmp.dghtrQaSHp /tmp/tmp.jDVJHqpfCU ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.t2fKhDCIDA +++ mktemp ++ local LAST_ERR=/tmp/tmp.Il70SwcqD6 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.t2fKhDCIDA ++ cat /tmp/tmp.Il70SwcqD6 ++ rm /tmp/tmp.t2fKhDCIDA /tmp/tmp.Il70SwcqD6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2CPVf72xA3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.9lxKIYXbzQ ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2CPVf72xA3 ++ cat /tmp/tmp.9lxKIYXbzQ ++ rm /tmp/tmp.2CPVf72xA3 /tmp/tmp.9lxKIYXbzQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.giDCxRWUCk +++ mktemp ++ local LAST_ERR=/tmp/tmp.vTRNIU8VDR ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.giDCxRWUCk ++ cat /tmp/tmp.vTRNIU8VDR ++ rm /tmp/tmp.giDCxRWUCk /tmp/tmp.vTRNIU8VDR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GbGDrl7qzB +++ mktemp ++ local LAST_ERR=/tmp/tmp.SrftCBCCw7 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GbGDrl7qzB ++ cat /tmp/tmp.SrftCBCCw7 ++ rm /tmp/tmp.GbGDrl7qzB /tmp/tmp.SrftCBCCw7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.P5V4aUsB2s +++ mktemp ++ local LAST_ERR=/tmp/tmp.UZVWNkdyTQ ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.P5V4aUsB2s ++ cat /tmp/tmp.UZVWNkdyTQ ++ rm /tmp/tmp.P5V4aUsB2s /tmp/tmp.UZVWNkdyTQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZWfqT36Nhg +++ mktemp ++ local LAST_ERR=/tmp/tmp.gCXFDrHWqe ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZWfqT36Nhg ++ cat /tmp/tmp.gCXFDrHWqe ++ rm /tmp/tmp.ZWfqT36Nhg /tmp/tmp.gCXFDrHWqe ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ya3HBOY7vl +++ mktemp ++ local LAST_ERR=/tmp/tmp.EyFYp1kBRR ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ya3HBOY7vl ++ cat /tmp/tmp.EyFYp1kBRR ++ rm /tmp/tmp.ya3HBOY7vl /tmp/tmp.EyFYp1kBRR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.y9iQNjIDTu +++ mktemp ++ local LAST_ERR=/tmp/tmp.UMJkSPeNHe ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.y9iQNjIDTu ++ cat /tmp/tmp.UMJkSPeNHe ++ rm /tmp/tmp.y9iQNjIDTu /tmp/tmp.UMJkSPeNHe ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 22 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YPtrqJDPiV +++ mktemp ++ local LAST_ERR=/tmp/tmp.xSUcexz0Po ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YPtrqJDPiV ++ cat /tmp/tmp.xSUcexz0Po ++ rm /tmp/tmp.YPtrqJDPiV /tmp/tmp.xSUcexz0Po ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 23 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7XaQkvE5O4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.G2ZDuYeJiF ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7XaQkvE5O4 ++ cat /tmp/tmp.G2ZDuYeJiF ++ rm /tmp/tmp.7XaQkvE5O4 /tmp/tmp.G2ZDuYeJiF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 24 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nIXwyv2pJW +++ mktemp ++ local LAST_ERR=/tmp/tmp.aNDX2sDZLx ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nIXwyv2pJW ++ cat /tmp/tmp.aNDX2sDZLx ++ rm /tmp/tmp.nIXwyv2pJW /tmp/tmp.aNDX2sDZLx ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 25 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6ZQPA3e1XX +++ mktemp ++ local LAST_ERR=/tmp/tmp.FGc0o7rbat ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6ZQPA3e1XX ++ cat /tmp/tmp.FGc0o7rbat ++ rm /tmp/tmp.6ZQPA3e1XX /tmp/tmp.FGc0o7rbat ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 26 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KFySSp3Q0I +++ mktemp ++ local LAST_ERR=/tmp/tmp.Y5DDcqSuCz ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KFySSp3Q0I ++ cat /tmp/tmp.Y5DDcqSuCz ++ rm /tmp/tmp.KFySSp3Q0I /tmp/tmp.Y5DDcqSuCz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 27 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SjcRjM8r65 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Vy73y5wCJz ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SjcRjM8r65 ++ cat /tmp/tmp.Vy73y5wCJz ++ rm /tmp/tmp.SjcRjM8r65 /tmp/tmp.Vy73y5wCJz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
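The iterations traced above all follow one polling pattern: the suite reads .status.state from the pxc resource every 5 seconds, for at most 300 attempts, until the operator reports "ready". A minimal sketch of that loop with plain kubectl (the real suite wraps calls in kubectl_bin with retries and mktemp-captured output; the helper name below is illustrative, not the suite's exact function body):

# poll the custom resource until the operator reports it ready
wait_for_pxc_ready() {
    local cluster=$1
    local i=0 max=300
    echo -n "waiting for pxc/$cluster to be ready"
    until [[ "$(kubectl get pxc "$cluster" -o jsonpath='{.status.state}')" == "ready" ]]; do
        [[ $i -ge $max ]] && { echo " timeout" >&2; return 1; }
        echo -n .
        sleep 5
        i=$((i + 1))
    done
    echo
}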
.+ sleep 5 + [[ 28 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KtWzey6LbQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.qdWqrjf4QU ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KtWzey6LbQ ++ cat /tmp/tmp.qdWqrjf4QU ++ rm /tmp/tmp.KtWzey6LbQ /tmp/tmp.qdWqrjf4QU ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tgNv598EKy +++ mktemp ++ local LAST_ERR=/tmp/tmp.I9ovomqair ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tgNv598EKy ++ cat /tmp/tmp.I9ovomqair ++ rm /tmp/tmp.tgNv598EKy /tmp/tmp.I9ovomqair ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.2dfBt9lv3W ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.HR0GTVvXB2 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.2dfBt9lv3W +++++ cat /tmp/tmp.HR0GTVvXB2 +++++ rm /tmp/tmp.2dfBt9lv3W /tmp/tmp.HR0GTVvXB2 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DltyOmNBTd +++ mktemp ++ local LAST_ERR=/tmp/tmp.itEyzI7j3r ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DltyOmNBTd ++ cat /tmp/tmp.itEyzI7j3r ++ rm /tmp/tmp.DltyOmNBTd /tmp/tmp.itEyzI7j3r ++ return 0 + [[ 3 == \3 ]] + echo + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oeqQf60h6d +++ mktemp ++ local LAST_ERR=/tmp/tmp.3zKVbcS1MU ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oeqQf60h6d ++ cat /tmp/tmp.3zKVbcS1MU ++ rm /tmp/tmp.oeqQf60h6d /tmp/tmp.3zKVbcS1MU ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.6UJFkmAREV ++ mktemp + local LAST_ERR=/tmp/tmp.okxsPk5vVX + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + 
'[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6UJFkmAREV secret/my-cluster-secrets patched + cat /tmp/tmp.okxsPk5vVX + rm /tmp/tmp.6UJFkmAREV /tmp/tmp.okxsPk5vVX + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.f4KGzIGBBu +++ mktemp ++ local LAST_ERR=/tmp/tmp.iSoXjY2VyK ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.f4KGzIGBBu ++ cat /tmp/tmp.iSoXjY2VyK ++ rm /tmp/tmp.f4KGzIGBBu /tmp/tmp.iSoXjY2VyK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.am2f7GdUrx +++ mktemp ++ local LAST_ERR=/tmp/tmp.AvbT5olcn8 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.am2f7GdUrx ++ cat /tmp/tmp.AvbT5olcn8 ++ rm /tmp/tmp.am2f7GdUrx /tmp/tmp.AvbT5olcn8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zAQUEYu7pT +++ mktemp ++ local LAST_ERR=/tmp/tmp.Fsjifq6Ajz ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zAQUEYu7pT ++ cat /tmp/tmp.Fsjifq6Ajz ++ rm /tmp/tmp.zAQUEYu7pT /tmp/tmp.Fsjifq6Ajz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wVXSezMLXX +++ mktemp ++ local LAST_ERR=/tmp/tmp.L1izOcOB4i ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wVXSezMLXX ++ cat /tmp/tmp.L1izOcOB4i ++ rm /tmp/tmp.wVXSezMLXX /tmp/tmp.L1izOcOB4i ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xus56wJQp3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.wpp5XZuu0Q ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xus56wJQp3 ++ cat /tmp/tmp.wpp5XZuu0Q ++ rm /tmp/tmp.xus56wJQp3 /tmp/tmp.wpp5XZuu0Q ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
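The patch_secret call above is what triggers the monitor password rotation: the new value is base64-encoded, written into the my-cluster-secrets system-users Secret, and after a short sleep the suite re-enters the same consistency wait while the operator reconciles the change. A rough plain-kubectl equivalent (dGVzdC1wYXNzd29yZDI= is simply base64 of test-password2, the value the test later uses to connect):

new_pass='test-password2'
encoded=$(echo -n "$new_pass" | base64)   # dGVzdC1wYXNzd29yZDI=
kubectl patch secret my-cluster-secrets -p="{\"data\":{\"monitor\": \"$encoded\"}}"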
.+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gIGTdDOTFn +++ mktemp ++ local LAST_ERR=/tmp/tmp.RSxAKfEXze ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gIGTdDOTFn ++ cat /tmp/tmp.RSxAKfEXze ++ rm /tmp/tmp.gIGTdDOTFn /tmp/tmp.RSxAKfEXze ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.auI2ItNYnh +++ mktemp ++ local LAST_ERR=/tmp/tmp.gnbW5y2Zv7 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.auI2ItNYnh ++ cat /tmp/tmp.gnbW5y2Zv7 ++ rm /tmp/tmp.auI2ItNYnh /tmp/tmp.gnbW5y2Zv7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pb8vRAGiVC +++ mktemp ++ local LAST_ERR=/tmp/tmp.Etcu5jzTwK ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pb8vRAGiVC ++ cat /tmp/tmp.Etcu5jzTwK ++ rm /tmp/tmp.pb8vRAGiVC /tmp/tmp.Etcu5jzTwK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1j3hvvx5pr +++ mktemp ++ local LAST_ERR=/tmp/tmp.6Uulry6Af0 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1j3hvvx5pr ++ cat /tmp/tmp.6Uulry6Af0 ++ rm /tmp/tmp.1j3hvvx5pr /tmp/tmp.6Uulry6Af0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aj1pAlovzc +++ mktemp ++ local LAST_ERR=/tmp/tmp.3MprQFslrI ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aj1pAlovzc ++ cat /tmp/tmp.3MprQFslrI ++ rm /tmp/tmp.aj1pAlovzc /tmp/tmp.3MprQFslrI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pMUVzQlgDz +++ mktemp ++ local LAST_ERR=/tmp/tmp.XOJINxGA33 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pMUVzQlgDz ++ cat /tmp/tmp.XOJINxGA33 ++ rm /tmp/tmp.pMUVzQlgDz /tmp/tmp.XOJINxGA33 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ddsFOa7Kgh +++ mktemp ++ local LAST_ERR=/tmp/tmp.GXIAMzlLee ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ddsFOa7Kgh ++ cat /tmp/tmp.GXIAMzlLee ++ rm /tmp/tmp.ddsFOa7Kgh /tmp/tmp.GXIAMzlLee ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ziuww3UCnS +++ mktemp ++ local LAST_ERR=/tmp/tmp.OsFmHrP35G ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ziuww3UCnS ++ cat /tmp/tmp.OsFmHrP35G ++ rm /tmp/tmp.Ziuww3UCnS /tmp/tmp.OsFmHrP35G ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7Hkhsrk7fe +++ mktemp ++ local LAST_ERR=/tmp/tmp.whXcrTfI3E ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7Hkhsrk7fe ++ cat /tmp/tmp.whXcrTfI3E ++ rm /tmp/tmp.7Hkhsrk7fe /tmp/tmp.whXcrTfI3E ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IHlVDyyc3L +++ mktemp ++ local LAST_ERR=/tmp/tmp.xJ9i9jiKZi ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IHlVDyyc3L ++ cat /tmp/tmp.xJ9i9jiKZi ++ rm /tmp/tmp.IHlVDyyc3L /tmp/tmp.xJ9i9jiKZi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tMKv6EKKXY +++ mktemp ++ local LAST_ERR=/tmp/tmp.ayBNYZPdX7 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tMKv6EKKXY ++ cat /tmp/tmp.ayBNYZPdX7 ++ rm /tmp/tmp.tMKv6EKKXY /tmp/tmp.ayBNYZPdX7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RCEIyWX58B +++ mktemp ++ local LAST_ERR=/tmp/tmp.4focs1PH7h ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RCEIyWX58B ++ cat /tmp/tmp.4focs1PH7h ++ rm /tmp/tmp.RCEIyWX58B /tmp/tmp.4focs1PH7h ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hSzyP2GwyA +++ mktemp ++ local LAST_ERR=/tmp/tmp.sttH4r2sZe ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hSzyP2GwyA ++ cat /tmp/tmp.sttH4r2sZe ++ rm /tmp/tmp.hSzyP2GwyA /tmp/tmp.sttH4r2sZe ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BNO4PKx0fQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.lcVBlycThb ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BNO4PKx0fQ ++ cat /tmp/tmp.lcVBlycThb ++ rm /tmp/tmp.BNO4PKx0fQ /tmp/tmp.lcVBlycThb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.H0cHyWbXpV +++ mktemp ++ local LAST_ERR=/tmp/tmp.S87t4cA7Aa ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.H0cHyWbXpV ++ cat /tmp/tmp.S87t4cA7Aa ++ rm /tmp/tmp.H0cHyWbXpV /tmp/tmp.S87t4cA7Aa ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ULYTAa0Gn8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.jaWRnKBj4X ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ULYTAa0Gn8 ++ cat /tmp/tmp.jaWRnKBj4X ++ rm /tmp/tmp.ULYTAa0Gn8 /tmp/tmp.jaWRnKBj4X ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
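Once .status.state flips to ready the loop exits and, as in the earlier ready block, the suite verifies cluster consistency: the reported PXC and proxy ready counts must match the expected sizes (3/3 in this run), and check_generation asserts the HAProxy StatefulSet was not rolled by the change. A condensed sketch of those checks with plain kubectl, expected values taken from this run:

kubectl get pxc some-name -o jsonpath='{.status.pxc.ready}'          # expect 3
if [[ "$(kubectl get pxc some-name -o jsonpath='{.spec.haproxy.enabled}')" == "true" ]]; then
    kubectl get pxc some-name -o jsonpath='{.status.haproxy.ready}'  # expect 3
fi
# check_generation: generation 1 means the statefulset spec was never updated/rolled
kubectl get statefulset some-name-haproxy -o jsonpath='{.metadata.generation}'   # expect 1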
.+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e48R8iIO2F +++ mktemp ++ local LAST_ERR=/tmp/tmp.hS3QYFPyDu ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.e48R8iIO2F ++ cat /tmp/tmp.hS3QYFPyDu ++ rm /tmp/tmp.e48R8iIO2F /tmp/tmp.hS3QYFPyDu ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XNuQTv8VHj +++ mktemp ++ local LAST_ERR=/tmp/tmp.1P9okOqIu2 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XNuQTv8VHj ++ cat /tmp/tmp.1P9okOqIu2 ++ rm /tmp/tmp.XNuQTv8VHj /tmp/tmp.1P9okOqIu2 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.JbYIJy0IP8 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.1XSiuavVnj +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.JbYIJy0IP8 +++++ cat /tmp/tmp.1XSiuavVnj +++++ rm /tmp/tmp.JbYIJy0IP8 /tmp/tmp.1XSiuavVnj +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3XagifwBS3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.aXIXOizr85 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3XagifwBS3 ++ cat /tmp/tmp.aXIXOizr85 ++ rm /tmp/tmp.3XagifwBS3 /tmp/tmp.aXIXOizr85 ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-3-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bQe6qKQNkJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.1gAS1V5Kki ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 
']' ++ break ++ cat /tmp/tmp.bQe6qKQNkJ ++ cat /tmp/tmp.1gAS1V5Kki ++ rm /tmp/tmp.bQe6qKQNkJ /tmp/tmp.1gAS1V5Kki ++ return 0 + client_pod=pxc-client-59944c5bbf-w5rpc + wait_pod pxc-client-59944c5bbf-w5rpc + local pod=pxc-client-59944c5bbf-w5rpc + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-w5rpc ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-59944c5bbf-w5rpc condition met waiting for pod/pxc-client-59944c5bbf-w5rpc to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.Y8WkxBbrUi/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2182/e2e-tests/users/compare/select-3.sql /tmp/tmp.Y8WkxBbrUi/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AIlkC04qc2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.EIZxT5PhyH ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AIlkC04qc2 ++ cat /tmp/tmp.EIZxT5PhyH ++ rm /tmp/tmp.AIlkC04qc2 /tmp/tmp.EIZxT5PhyH ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + destroy users-12601 + local namespace=users-12601 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' + grep -v 'get backup status: Job.batch' + /usr/sbin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + sort -u + tee /tmp/tmp.Y8WkxBbrUi/operator.log +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.rZB8oBjFVK +++ mktemp ++ local LAST_ERR=/tmp/tmp.SdfCItPB1v ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rZB8oBjFVK ++ cat /tmp/tmp.SdfCItPB1v ++ rm /tmp/tmp.rZB8oBjFVK /tmp/tmp.SdfCItPB1v ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-97b698788-pp7qg ++ mktemp + local LAST_OUT=/tmp/tmp.rvRu7Cv7qh ++ mktemp + local LAST_ERR=/tmp/tmp.0aw3TEkt26 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-97b698788-pp7qg + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rvRu7Cv7qh + cat /tmp/tmp.0aw3TEkt26 + rm /tmp/tmp.rvRu7Cv7qh /tmp/tmp.0aw3TEkt26 + 
return 0 + 2025-09-30T13:05:48.404Z INFO setup Manager starting up {"gitCommit": "afafff8850ef95c8fc11abb22326b3f127543c3d", "gitBranch": "PR-2182-afafff88", "buildTime": "2025-09-30T11:00:29Z", "goVersion": "go1.24.7", "os": "linux", "arch": "amd64"} 2025-09-30T13:05:48.404Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.12-gke.1265000"} 2025-09-30T13:05:48.408Z INFO setup Registering Components. 2025-09-30T13:05:49.202Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-09-30T13:05:49.202Z INFO controller-runtime.metrics Starting metrics server 2025-09-30T13:05:49.202Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-09-30T13:05:49.202Z INFO controller-runtime.webhook Starting webhook server 2025-09-30T13:05:49.202Z INFO setup Starting the Cmd. 2025-09-30T13:05:49.202Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-09-30T13:05:49.283Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-09-30T13:05:49.283Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-09-30T13:05:49.283Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-09-30T13:05:49.303Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 
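The operator log quoted from here on is gathered during the destroy step traced above: the suite resolves the operator pod by label, dumps its log, and filters the noisy level=info lines before saving a copy. Approximately (the suite additionally strips timestamps with sed, de-duplicates with sort -u, and tees into its mktemp directory; the output path below is only illustrative):

op_pod=$(kubectl get pods -n pxc-operator \
    --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator \
    -o jsonpath='{.items[].metadata.name}')
kubectl logs -n pxc-operator "$op_pod" \
    | grep -v level=info | grep -v 'the object has been modified' \
    | tee /tmp/operator.log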
2025-09-30T13:05:49.336Z DEBUG events percona-xtradb-cluster-operator-97b698788-pp7qg_85929190-d30b-4902-a107-51d851af0a0d became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"5a1dc926-eed0-4d7b-98ba-08cc5c609334","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1759237549330895009"}, "reason": "LeaderElection"} 2025-09-30T13:05:49.336Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2025-09-30T13:05:49.337Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-09-30T13:05:49.337Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-09-30T13:05:49.337Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.Secret"} 2025-09-30T13:05:49.337Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-09-30T13:05:49.438Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2025-09-30T13:05:49.438Z INFO Starting Controller {"controller": "pxc-controller"} 2025-09-30T13:05:49.438Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2025-09-30T13:05:49.438Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2025-09-30T13:05:49.438Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2025-09-30T13:05:49.438Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2025-09-30T13:06:21.471Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "66fbec7e-1f72-4097-b305-659f0d37917e", "version": "1.19.0"} 2025-09-30T13:06:21.704Z INFO User secrets updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "66fbec7e-1f72-4097-b305-659f0d37917e", "secrets": "my-cluster-secrets"} 2025-09-30T13:06:21.926Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "66fbec7e-1f72-4097-b305-659f0d37917e", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-09-30T13:06:21.943Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "66fbec7e-1f72-4097-b305-659f0d37917e", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-09-30T13:06:22.513Z ERROR Reconciler error {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "66fbec7e-1f72-4097-b305-659f0d37917e", "error": "failed to deploy proxysql: updatePod for proxysql: reconcile config: reconcile autotune config: create or update configmap: configmaps \"auto-some-name-pxc\" already exists", "errorVerbose": "configmaps \"auto-some-name-pxc\" already exists\ncreate or update 
configmap\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileAutotuneConfigMap\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:92\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:25\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nreconcile autotune 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:27\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nreconcile 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:54\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nupdatePod for proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:578\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nfailed to deploy 
proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:596\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-09-30T13:06:22.646Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "899e3540-b86d-4bae-89dd-ee1091a5b676", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-09-30T13:06:22.709Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "899e3540-b86d-4bae-89dd-ee1091a5b676", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-09-30T13:06:22.777Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "899e3540-b86d-4bae-89dd-ee1091a5b676", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-09-30T13:06:22.827Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "899e3540-b86d-4bae-89dd-ee1091a5b676", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-09-30T13:06:22.908Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "899e3540-b86d-4bae-89dd-ee1091a5b676", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-09-30T13:06:23.009Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "899e3540-b86d-4bae-89dd-ee1091a5b676", "object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-09-30T13:06:23.559Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "981c7d79-5ec2-496d-b2bf-bcbc5fa9ce1b", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-09-30T13:06:23.584Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "981c7d79-5ec2-496d-b2bf-bcbc5fa9ce1b", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-09-30T13:07:39.810Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-12601", "name": 
"some-name", "reconcileID": "34a5b0ff-ac5f-4e38-9801-a14717053d25", "user": "operator"} 2025-09-30T13:07:39.840Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "34a5b0ff-ac5f-4e38-9801-a14717053d25", "user": "monitor"} 2025-09-30T13:07:39.892Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "34a5b0ff-ac5f-4e38-9801-a14717053d25"} 2025-09-30T13:07:39.945Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "34a5b0ff-ac5f-4e38-9801-a14717053d25"} 2025-09-30T13:07:39.977Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "34a5b0ff-ac5f-4e38-9801-a14717053d25", "user": "xtrabackup"} 2025-09-30T13:07:40.017Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "34a5b0ff-ac5f-4e38-9801-a14717053d25"} 2025-09-30T13:07:40.049Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "34a5b0ff-ac5f-4e38-9801-a14717053d25", "user": "replication"} 2025-09-30T13:07:40.058Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "34a5b0ff-ac5f-4e38-9801-a14717053d25", "err": "get primary pxc pod: not found"} 2025-09-30T13:07:44.783Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "b3c64f40-c503-45da-a7a1-6a1c19dfc88b", "err": "get primary pxc pod: not found"} 2025-09-30T13:07:49.991Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "949c2bb7-8f72-4265-8171-dcc364175976", "err": "get primary pxc pod: not found"} 2025-09-30T13:07:55.129Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "053f2c8b-cf6c-492c-871f-aab89198cec1", "err": "get primary pxc pod: not found"} 2025-09-30T13:10:06.815Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "f957dbc4-8e6d-411c-b783-6949a35469e5", "user": "root"} 2025-09-30T13:10:06.949Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "f957dbc4-8e6d-411c-b783-6949a35469e5", "new version": "8.0.42-33.1"} 2025-09-30T13:10:08.615Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "f957dbc4-8e6d-411c-b783-6949a35469e5"} 2025-09-30T13:10:13.827Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "ff557706-3789-4b2f-86af-72f5020d37d2"} 2025-09-30T13:10:19.013Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "455cbb25-6c69-43e5-a62f-865ee43b94e1"} 2025-09-30T13:10:24.636Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "d262921c-af68-441e-9074-3b8156710544"} 
2025-09-30T13:10:29.762Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "034c7c92-6628-4c0c-bca0-c6f16a1623e4"} 2025-09-30T13:10:35.316Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "75453087-1448-4598-8137-a87150c39a38"} 2025-09-30T13:10:40.418Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "7988a8fd-e7f7-424f-9cbc-c8f56c6bbb06"} 2025-09-30T13:10:45.952Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1f64aba0-3976-4081-94fd-20ca618d99a1"} 2025-09-30T13:10:51.250Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "e004f106-40e5-49ed-9557-deed280e96be"} 2025-09-30T13:10:56.528Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "a22305b4-e508-4ebe-a142-16baa1035f8e"} 2025-09-30T13:11:01.844Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "26c2a1d5-a75b-4acb-bc02-de8d046c0076"} 2025-09-30T13:11:07.337Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "35a3c80d-248a-4502-bc53-488c1f114b14"} 2025-09-30T13:11:13.418Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "d20dff6e-6f53-4ad7-870e-88aa142313fc"} 2025-09-30T13:11:18.037Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "11413379-a982-4f69-8a11-dcdf28b2f7ec"} 2025-09-30T13:11:23.555Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "84dc54ee-fc15-43d2-923a-b9e18e6bca83"} 2025-09-30T13:11:28.941Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "ea0f251b-5f34-43de-87f2-a8c740ba860a"} 2025-09-30T13:11:34.040Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "4e6931c4-00d0-4b00-973f-72e780f31b7d"} 2025-09-30T13:11:34.618Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "6f733b39-f14d-4e13-91be-874c44fe9e31", "user": "root"} 2025-09-30T13:11:34.643Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "6f733b39-f14d-4e13-91be-874c44fe9e31", "user": "root"} 2025-09-30T13:11:34.662Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "6f733b39-f14d-4e13-91be-874c44fe9e31", "secret": "some-name-mysql-init", "user": "root"} 2025-09-30T13:11:37.038Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "6f733b39-f14d-4e13-91be-874c44fe9e31"} 2025-09-30T13:11:37.061Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": 
"users-12601", "name": "some-name", "reconcileID": "6f733b39-f14d-4e13-91be-874c44fe9e31", "user": "root"} 2025-09-30T13:11:37.083Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "6f733b39-f14d-4e13-91be-874c44fe9e31", "user": "root"} 2025-09-30T13:11:38.659Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "6f733b39-f14d-4e13-91be-874c44fe9e31"} 2025-09-30T13:11:44.806Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "bd038be9-d396-4164-8f50-32e7810ebcc7"} 2025-09-30T13:11:50.129Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "a8d40b34-ef9d-4cc8-ae13-635ed0a477c6"} 2025-09-30T13:11:54.136Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "67aa7597-13fb-42a1-b9fe-d31aa1911690", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-09-30T13:11:54.206Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "67aa7597-13fb-42a1-b9fe-d31aa1911690", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-09-30T13:11:55.537Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "0e22b13c-f991-4b35-9119-a7227a7058d3"} 2025-09-30T13:12:18.873Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "45fe0c06-e68f-47e3-8ef9-2df9cb59d971", "user": "proxyadmin"} 2025-09-30T13:12:18.873Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "45fe0c06-e68f-47e3-8ef9-2df9cb59d971", "user": "proxyadmin"} 2025-09-30T13:12:18.910Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "45fe0c06-e68f-47e3-8ef9-2df9cb59d971", "user": "proxyadmin"} 2025-09-30T13:12:18.935Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "45fe0c06-e68f-47e3-8ef9-2df9cb59d971", "user": "proxyadmin"} 2025-09-30T13:12:18.935Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "45fe0c06-e68f-47e3-8ef9-2df9cb59d971", "last-applied-secret": "b748d1f8cceb1729a8cfefe55d83d125aa5b078634fdab5276ba861a89334a34"} 2025-09-30T13:12:18.939Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "45fe0c06-e68f-47e3-8ef9-2df9cb59d971", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-09-30T13:12:19.571Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "f171c777-6e71-49fc-a1b8-7d7c3091fb6d", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ERROR (line:515) : The cluster 
(with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:509) : Galera hostgroup retrieval failed. \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:509) : Galera hostgroup retrieval failed. \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-09-30T13:12:54.644Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "be2bcd50-58a1-4d0d-9561-24b5e0dfacba"} 2025-09-30T13:12:58.812Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "80e15d6e-e545-40c3-8e81-1f5c280782d2", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-09-30T13:12:58.917Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "80e15d6e-e545-40c3-8e81-1f5c280782d2", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-09-30T13:13:00.378Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "5c08cc23-b488-4eeb-ac6d-8f3697e71e95", "error": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-2\" not found / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-2\" not found / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-09-30T13:13:00.859Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": 
"users-12601", "name": "some-name", "reconcileID": "1d99b88d-2c5a-4d4b-b18b-ffef6763dcd1", "user": "xtrabackup"} 2025-09-30T13:13:00.873Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1d99b88d-2c5a-4d4b-b18b-ffef6763dcd1", "user": "xtrabackup"} 2025-09-30T13:13:00.895Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1d99b88d-2c5a-4d4b-b18b-ffef6763dcd1", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-09-30T13:13:00.918Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1d99b88d-2c5a-4d4b-b18b-ffef6763dcd1", "user": "xtrabackup"} 2025-09-30T13:13:00.930Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1d99b88d-2c5a-4d4b-b18b-ffef6763dcd1", "user": "xtrabackup"} 2025-09-30T13:13:00.937Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1d99b88d-2c5a-4d4b-b18b-ffef6763dcd1", "last-applied-secret": "234acd7af96d16f48e2a6d30deaa86db83d7dd00a0cac986e01608ecb47eb03a"} 2025-09-30T13:13:00.940Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1d99b88d-2c5a-4d4b-b18b-ffef6763dcd1", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-09-30T13:13:03.362Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1d99b88d-2c5a-4d4b-b18b-ffef6763dcd1"} 2025-09-30T13:13:51.254Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "a49eb436-5aee-4caa-8189-754496abf1b1", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-12601 on 34.118.224.10:53: no such host"} 2025-09-30T13:13:55.817Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "31d26bca-0792-4e3c-a311-1902eeb04e49", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-12601 on 34.118.224.10:53: no such host"} 2025-09-30T13:14:01.143Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "a4b9faca-6319-460a-8072-947075f8c07d", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-12601 on 34.118.224.10:53: no such host"} 2025-09-30T13:14:38.458Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "93207336-cf80-478e-9a0b-61b853570424", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-12601 on 34.118.224.10:53: no such host"} 2025-09-30T13:14:38.803Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "5e0dc9f6-ed39-42e5-bcf0-2997de835d78", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-12601 on 
34.118.224.10:53: no such host"} 2025-09-30T13:14:44.151Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "3fc6ce60-2f6f-41e5-99bf-2b8752cb091c", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-12601 on 34.118.224.10:53: no such host"} 2025-09-30T13:14:49.451Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "3be1b5f3-8453-473e-8c23-ea41345d63fd", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-12601 on 34.118.224.10:53: no such host"} 2025-09-30T13:14:54.621Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "16031561-67d9-42c7-8452-27e13c7d253c", "primary name": "some-name-pxc-0.some-name-pxc.users-12601.svc.cluster.local"} 2025-09-30T13:15:20.453Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "c9875cab-2644-4441-9ac7-6c110c3d4df2", "primary name": "some-name-pxc-0.some-name-pxc.users-12601.svc.cluster.local"} 2025-09-30T13:15:28.503Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "ee270734-e16c-4d04-becd-beb040c2d5d1"} 2025-09-30T13:15:33.300Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "b3c579c3-a749-47c6-823b-e58e04e345f9"} 2025-09-30T13:15:33.938Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1ed393c5-4e21-49a6-87d2-b2f6e6ec5c61", "user": "monitor"} 2025-09-30T13:15:33.954Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1ed393c5-4e21-49a6-87d2-b2f6e6ec5c61", "user": "monitor"} 2025-09-30T13:15:33.975Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1ed393c5-4e21-49a6-87d2-b2f6e6ec5c61", "secret": "some-name-mysql-init", "user": "monitor"} 2025-09-30T13:15:33.998Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1ed393c5-4e21-49a6-87d2-b2f6e6ec5c61", "user": "monitor"} 2025-09-30T13:15:34.021Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1ed393c5-4e21-49a6-87d2-b2f6e6ec5c61", "user": "monitor"} 2025-09-30T13:15:34.329Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1ed393c5-4e21-49a6-87d2-b2f6e6ec5c61", "last-applied-secret": "c84a768389e6a48d66220f71cf8e982e0d648a115dbbe90a1461efb8b4e6ac53"} 2025-09-30T13:15:34.333Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1ed393c5-4e21-49a6-87d2-b2f6e6ec5c61", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-09-30T13:15:36.838Z INFO Password updated but old one not discarded 
{"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "00d1d9ca-d368-45d3-a5ed-db6753c3d056", "user": "monitor"} 2025-09-30T13:15:36.867Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1ed393c5-4e21-49a6-87d2-b2f6e6ec5c61", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-09-30T13:15:39.122Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "00d1d9ca-d368-45d3-a5ed-db6753c3d056", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-09-30T13:16:17.844Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "71e68213-5eb9-4716-8925-78e3538bea64", "user": "monitor"} 2025-09-30T13:16:19.870Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "71e68213-5eb9-4716-8925-78e3538bea64"} 2025-09-30T13:16:22.835Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1e6ff00b-07b3-438f-a38c-50876f1d5148", "user": "monitor"} 2025-09-30T13:16:24.746Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1e6ff00b-07b3-438f-a38c-50876f1d5148"} 2025-09-30T13:16:28.530Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "313eda70-4ca9-4fda-9ddc-ce48177bd3c7", "user": "monitor"} 2025-09-30T13:16:29.943Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "313eda70-4ca9-4fda-9ddc-ce48177bd3c7"} 2025-09-30T13:16:33.903Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": 
"72d8353f-32e8-4317-8637-03573fea1a5a", "user": "monitor"} 2025-09-30T13:16:34.560Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "72d8353f-32e8-4317-8637-03573fea1a5a", "user": "monitor"} 2025-09-30T13:16:34.573Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "72d8353f-32e8-4317-8637-03573fea1a5a", "last-applied-secret": "c84a768389e6a48d66220f71cf8e982e0d648a115dbbe90a1461efb8b4e6ac53"} 2025-09-30T13:16:36.337Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "72d8353f-32e8-4317-8637-03573fea1a5a"} 2025-09-30T13:16:41.624Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "01cf3d9f-9ca9-479e-af39-31aebf5c73e9"} 2025-09-30T13:16:47.248Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "782188cb-f25b-40a2-99f3-b6a1a7c359ee"} 2025-09-30T13:16:52.356Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "096d89b1-3ac4-4851-8855-dc1159c436a4"} 2025-09-30T13:16:57.863Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "723961e6-55b4-4197-b769-4b466f3de51d"} 2025-09-30T13:17:02.343Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "f8d67132-b2fe-4d55-a0fc-1a611fab13b1", "user": "operator"} 2025-09-30T13:17:02.360Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "f8d67132-b2fe-4d55-a0fc-1a611fab13b1", "user": "operator"} 2025-09-30T13:17:02.382Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "f8d67132-b2fe-4d55-a0fc-1a611fab13b1", "secret": "some-name-mysql-init", "user": "operator"} 2025-09-30T13:17:02.404Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "f8d67132-b2fe-4d55-a0fc-1a611fab13b1", "user": "operator"} 2025-09-30T13:17:02.419Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "f8d67132-b2fe-4d55-a0fc-1a611fab13b1", "user": "operator"} 2025-09-30T13:17:02.438Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "f8d67132-b2fe-4d55-a0fc-1a611fab13b1", "last-applied-secret": "8e4372eebb2b14c0c1f8f1492b7a3b6d6c9902c4460dacc6ca2100d669e79bf9"} 2025-09-30T13:17:02.484Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "f8d67132-b2fe-4d55-a0fc-1a611fab13b1", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-09-30T13:17:03.487Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "2e6d0e06-ba9d-4493-8be9-6d229f869ccd", "error": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-1 does not have a host assigned / / ERROR 1045 
(28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-12601.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-12601.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-12601.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-1 does not have a host assigned / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-12601.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-12601.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-12601.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-09-30T13:17:22.435Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "5e4bde79-c465-4af1-b54f-0c53e3f19332"} 2025-09-30T13:17:27.233Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "b83b65c0-2e6e-4b48-ba88-7d2d909459b9"} 2025-09-30T13:17:32.441Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "197c96e6-f047-4d8f-964d-4a1bee061b2d"} 2025-09-30T13:17:37.719Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "c02cb120-ed0b-4030-9949-f9f580653a0f"} 2025-09-30T13:17:42.302Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "secrets": "my-cluster-secrets-2"} 2025-09-30T13:17:42.311Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "user": "root"} 2025-09-30T13:17:42.336Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "user": "root"} 2025-09-30T13:17:42.357Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "secret": "some-name-mysql-init", "user": "root"} 2025-09-30T13:17:45.733Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "c63614c4-7079-4754-b632-7a7912413216"} 2025-09-30T13:17:45.745Z DEBUG PXC users synced with 
ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77"} 2025-09-30T13:17:45.768Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "user": "root"} 2025-09-30T13:17:45.790Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "user": "root"} 2025-09-30T13:17:45.796Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "user": "operator"} 2025-09-30T13:17:45.809Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "user": "operator"} 2025-09-30T13:17:45.828Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "secret": "some-name-mysql-init", "user": "operator"} 2025-09-30T13:17:45.846Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "user": "operator"} 2025-09-30T13:17:45.861Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "user": "operator"} 2025-09-30T13:17:45.868Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "user": "monitor"} 2025-09-30T13:17:45.881Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "user": "monitor"} 2025-09-30T13:17:45.897Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "secret": "some-name-mysql-init", "user": "monitor"} 2025-09-30T13:17:45.916Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "user": "monitor"} 2025-09-30T13:17:45.935Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "user": "monitor"} 2025-09-30T13:17:46.221Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "user": "xtrabackup"} 2025-09-30T13:17:46.238Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "user": "xtrabackup"} 2025-09-30T13:17:46.258Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-09-30T13:17:46.289Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", 
"reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "user": "xtrabackup"} 2025-09-30T13:17:46.305Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "user": "xtrabackup"} 2025-09-30T13:17:46.309Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "user": "replication"} 2025-09-30T13:17:46.326Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "user": "replication"} 2025-09-30T13:17:46.356Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "secret": "some-name-mysql-init", "user": "replication"} 2025-09-30T13:17:46.383Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "user": "replication"} 2025-09-30T13:17:46.397Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "user": "replication"} 2025-09-30T13:17:46.397Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "user": "proxyadmin"} 2025-09-30T13:17:46.417Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "user": "proxyadmin"} 2025-09-30T13:17:46.450Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "user": "proxyadmin"} 2025-09-30T13:17:46.450Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "last-applied-secret": "544f2e2aa407ddaae0532735df38b5d540612cb01664cbb11669c4c8518499f3"} 2025-09-30T13:17:46.450Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "last-applied-secret": "544f2e2aa407ddaae0532735df38b5d540612cb01664cbb11669c4c8518499f3"} 2025-09-30T13:17:46.453Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-09-30T13:17:46.547Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-09-30T13:17:48.119Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "559d9d35-5341-4a0c-815e-0c23883ade77", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: 
Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-09-30T13:18:39.688Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "2005ff6c-4806-441d-a7f5-fb3212c6bbbd", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-12601 on 34.118.224.10:53: no such host"} 2025-09-30T13:18:44.627Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "5e0660f3-1822-40fb-bd11-17fbb7f0d5d3", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-12601 on 34.118.224.10:53: no such host"} 2025-09-30T13:19:26.683Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "c336fbf8-f982-42bd-b96e-5816ebb74821", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-12601 on 34.118.224.10:53: no such host"} 2025-09-30T13:19:32.037Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "e46ab15f-a43a-4a03-bc35-2919283903d6", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-12601 on 34.118.224.10:53: no such host"} 2025-09-30T13:19:37.237Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "addbaa66-fa9f-41bc-8a42-5ab2fe89ef2a", "primary name": "some-name-pxc-0.some-name-pxc.users-12601.svc.cluster.local"} 2025-09-30T13:19:42.413Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "be4db952-7a19-4fdc-9466-bcdb3cc47974", "primary name": "some-name-pxc-0.some-name-pxc.users-12601.svc.cluster.local"} 2025-09-30T13:19:47.603Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "9bc5ddc2-c08a-4c85-a4c1-39ffc1c007c0", "primary name": "some-name-pxc-0.some-name-pxc.users-12601.svc.cluster.local"} 2025-09-30T13:19:52.740Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "3827f7af-13eb-4cc7-9a6b-3b4d7bcf4c80", "primary name": "some-name-pxc-0.some-name-pxc.users-12601.svc.cluster.local"} 2025-09-30T13:19:57.881Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "370a26dc-2712-4c17-8014-56a8f4d912fa", "primary name": "some-name-pxc-0.some-name-pxc.users-12601.svc.cluster.local"} 2025-09-30T13:20:08.909Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "fbe49419-fb58-4359-bf4c-34be85eec7ed", "primary name": "some-name-pxc-0.some-name-pxc.users-12601.svc.cluster.local"} 2025-09-30T13:20:14.977Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "d2018726-33a4-4fa9-b52b-7776658e2f98", "user": "monitor"} 2025-09-30T13:20:15.833Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "d2018726-33a4-4fa9-b52b-7776658e2f98", "user": "monitor"} 2025-09-30T13:20:15.849Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "d2018726-33a4-4fa9-b52b-7776658e2f98", "last-applied-secret": "544f2e2aa407ddaae0532735df38b5d540612cb01664cbb11669c4c8518499f3"} 2025-09-30T13:20:17.774Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "d2018726-33a4-4fa9-b52b-7776658e2f98"} 2025-09-30T13:20:19.550Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1286a9af-a750-4e28-8cbb-22155aa140bc", "user": "operator"} 2025-09-30T13:20:19.567Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1286a9af-a750-4e28-8cbb-22155aa140bc", "user": "operator"} 2025-09-30T13:20:19.586Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1286a9af-a750-4e28-8cbb-22155aa140bc", "secret": "some-name-mysql-init", "user": "operator"} 2025-09-30T13:20:19.607Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1286a9af-a750-4e28-8cbb-22155aa140bc", "user": "operator"} 2025-09-30T13:20:19.621Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1286a9af-a750-4e28-8cbb-22155aa140bc", "user": "operator"} 2025-09-30T13:20:19.641Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1286a9af-a750-4e28-8cbb-22155aa140bc", "last-applied-secret": "352e4d5e5053ec0befe87177e585d4b64885da294b14a8e1116b78fe891e7583"} 2025-09-30T13:20:19.645Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1286a9af-a750-4e28-8cbb-22155aa140bc", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-09-30T13:20:22.695Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1286a9af-a750-4e28-8cbb-22155aa140bc", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 
'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12601.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12601.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12601.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12601.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12601.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12601.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12601.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12601.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12601.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12601.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12601.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12601.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12601.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12601.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12601.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12601.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12601.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12601.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12601.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12601.svc.' 
(using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-09-30T13:21:09.148Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "a427bb77-e03b-4a07-8cca-9a92574fd079"} 2025-09-30T13:21:13.906Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "7a44612e-723c-4f0d-b355-14ee08ff0128"} 2025-09-30T13:21:19.202Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "0e24709a-5036-4322-9695-01a7d1845126"} 2025-09-30T13:21:24.512Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "2a6103f8-135f-479f-8f1e-41549226e564"} 2025-09-30T13:21:29.484Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "9fa9157b-639a-4013-9b88-27a0443e7ef7"} 2025-09-30T13:21:35.088Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "b6a6cb3c-09db-4317-a162-3daf0dbc167a"} 2025-09-30T13:21:40.688Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "20552a23-3e29-4f2a-95a2-fa0bf00b0cca"} 2025-09-30T13:21:46.293Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "2f1698be-f833-439b-b040-978fd59bba2e"} 2025-09-30T13:21:51.193Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "2c341713-289a-4049-bf13-4953e07a0a62"} 2025-09-30T13:21:56.529Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "71820462-7cd4-41bf-a663-4aa5f166148d"} 2025-09-30T13:22:01.725Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "754f71b1-5845-4f58-b9b4-83813d91cecd"} 2025-09-30T13:22:07.014Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "079b0a69-6e04-46e6-bbb2-66c6c92780cf"} 2025-09-30T13:22:12.528Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "8c11a7a9-c09e-4495-b3ce-de71d046c3f6"} 2025-09-30T13:22:17.676Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "2c518ec3-a62c-4de3-a558-86dc92310bd8"} 2025-09-30T13:22:22.803Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": 
"5893e6bb-a065-4253-ab9a-4ea64fb40b1d"} 2025-09-30T13:22:25.338Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "95415174-18aa-419a-afda-619333af7d69", "user": "root"} 2025-09-30T13:22:25.364Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "95415174-18aa-419a-afda-619333af7d69", "user": "root"} 2025-09-30T13:22:25.384Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "95415174-18aa-419a-afda-619333af7d69", "secret": "some-name-mysql-init", "user": "root"} 2025-09-30T13:22:27.807Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "95415174-18aa-419a-afda-619333af7d69"} 2025-09-30T13:22:27.830Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "95415174-18aa-419a-afda-619333af7d69", "user": "root"} 2025-09-30T13:22:27.856Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "95415174-18aa-419a-afda-619333af7d69", "user": "root"} 2025-09-30T13:22:27.875Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "95415174-18aa-419a-afda-619333af7d69", "user": "monitor"} 2025-09-30T13:22:27.890Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "95415174-18aa-419a-afda-619333af7d69", "user": "monitor"} 2025-09-30T13:22:27.906Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "95415174-18aa-419a-afda-619333af7d69", "secret": "some-name-mysql-init", "user": "monitor"} 2025-09-30T13:22:27.926Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "95415174-18aa-419a-afda-619333af7d69", "user": "monitor"} 2025-09-30T13:22:27.946Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "95415174-18aa-419a-afda-619333af7d69", "user": "monitor"} 2025-09-30T13:22:28.237Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "95415174-18aa-419a-afda-619333af7d69", "user": "xtrabackup"} 2025-09-30T13:22:28.251Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "95415174-18aa-419a-afda-619333af7d69", "user": "xtrabackup"} 2025-09-30T13:22:28.284Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "95415174-18aa-419a-afda-619333af7d69", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-09-30T13:22:28.304Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "95415174-18aa-419a-afda-619333af7d69", "user": "xtrabackup"} 2025-09-30T13:22:28.324Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "95415174-18aa-419a-afda-619333af7d69", "user": "xtrabackup"} 2025-09-30T13:22:28.332Z INFO Password changed, updating 
user {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "95415174-18aa-419a-afda-619333af7d69", "user": "proxyadmin"} 2025-09-30T13:22:28.353Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "95415174-18aa-419a-afda-619333af7d69", "user": "proxyadmin"} 2025-09-30T13:22:28.379Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "95415174-18aa-419a-afda-619333af7d69", "user": "proxyadmin"} 2025-09-30T13:22:28.379Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "95415174-18aa-419a-afda-619333af7d69", "last-applied-secret": "8ebcf396945e5b811eb284a3efdf24a85a417e42841c212bdc20a35bfc9b6f76"} 2025-09-30T13:22:28.379Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "95415174-18aa-419a-afda-619333af7d69", "last-applied-secret": "8ebcf396945e5b811eb284a3efdf24a85a417e42841c212bdc20a35bfc9b6f76"} 2025-09-30T13:22:28.382Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "95415174-18aa-419a-afda-619333af7d69", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-09-30T13:22:28.439Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "95415174-18aa-419a-afda-619333af7d69", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-09-30T13:22:30.111Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "95415174-18aa-419a-afda-619333af7d69", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-09-30T13:22:45.733Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "e13267e8-a02d-441d-aba1-0f4b01b09be2", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-09-30T13:22:45.779Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "e13267e8-a02d-441d-aba1-0f4b01b09be2", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-09-30T13:22:45.848Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "e13267e8-a02d-441d-aba1-0f4b01b09be2", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-09-30T13:22:45.948Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "e13267e8-a02d-441d-aba1-0f4b01b09be2", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-09-30T13:22:46.124Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "e13267e8-a02d-441d-aba1-0f4b01b09be2", "object": "some-name-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 
2025-09-30T13:22:47.077Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "234400ae-6035-49ae-b73e-8a42b4d2a8b1", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-09-30T13:24:25.134Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "99987b19-67ad-47b6-8169-91b1cd033a2a", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-12601 on 34.118.224.10:53: no such host"} 2025-09-30T13:25:02.156Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "1312c302-bc6e-4d93-9f66-164e08b23559", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-12601 on 34.118.224.10:53: no such host"} 2025-09-30T13:25:02.903Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "d60d5237-1561-489c-9e95-227d354368b3", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-12601 on 34.118.224.10:53: no such host"} 2025-09-30T13:25:07.459Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "5c01a3c8-c5d7-4e00-837e-fcdcab73c759", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-12601 on 34.118.224.10:53: no such host"} 2025-09-30T13:25:50.335Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "user": "root"} 2025-09-30T13:25:50.358Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "user": "root"} 2025-09-30T13:25:50.394Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "secret": "some-name-mysql-init", "user": "root"} 2025-09-30T13:25:50.420Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "user": "root"} 2025-09-30T13:25:50.441Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "user": "root"} 2025-09-30T13:25:50.447Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "user": "operator"} 2025-09-30T13:25:50.462Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "user": "operator"} 2025-09-30T13:25:50.493Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "secret": "some-name-mysql-init", "user": "operator"} 2025-09-30T13:25:50.519Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": 
"users-12601", "name": "some-name", "reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "user": "operator"} 2025-09-30T13:25:50.531Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "user": "operator"} 2025-09-30T13:25:50.538Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "user": "monitor"} 2025-09-30T13:25:50.558Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "user": "monitor"} 2025-09-30T13:25:50.591Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "secret": "some-name-mysql-init", "user": "monitor"} 2025-09-30T13:25:50.609Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "user": "monitor"} 2025-09-30T13:25:50.920Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "user": "xtrabackup"} 2025-09-30T13:25:50.936Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "user": "xtrabackup"} 2025-09-30T13:25:50.954Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-09-30T13:25:50.972Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "user": "xtrabackup"} 2025-09-30T13:25:50.986Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "user": "xtrabackup"} 2025-09-30T13:25:50.992Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "user": "replication"} 2025-09-30T13:25:51.004Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "user": "replication"} 2025-09-30T13:25:51.025Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "secret": "some-name-mysql-init", "user": "replication"} 2025-09-30T13:25:51.048Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "user": "replication"} 2025-09-30T13:25:51.061Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "user": "replication"} 2025-09-30T13:25:51.061Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", 
"reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "last-applied-secret": "8e4372eebb2b14c0c1f8f1492b7a3b6d6c9902c4460dacc6ca2100d669e79bf9"} 2025-09-30T13:25:51.063Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "699fe6c0-ef18-4a42-a059-e3191a4d45c0", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-09-30T13:27:25.456Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "9b904bee-f48d-4b15-9232-cff1959d9949", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-12601 on 34.118.224.10:53: no such host"} 2025-09-30T13:27:25.803Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "ca292b6c-9dcd-4e0b-97eb-48f9b0e24b11", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-12601 on 34.118.224.10:53: no such host"} 2025-09-30T13:27:30.764Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "a186fb14-fce5-4331-b8c4-554d00d408e2", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-12601 on 34.118.224.10:53: no such host"} 2025-09-30T13:28:08.589Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "eb045233-e434-413d-9165-9f35a2fe5fe9", "user": "monitor"} 2025-09-30T13:28:08.607Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "eb045233-e434-413d-9165-9f35a2fe5fe9", "user": "monitor"} 2025-09-30T13:28:08.623Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "eb045233-e434-413d-9165-9f35a2fe5fe9", "secret": "some-name-mysql-init", "user": "monitor"} 2025-09-30T13:28:08.640Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "eb045233-e434-413d-9165-9f35a2fe5fe9", "user": "monitor"} 2025-09-30T13:28:13.559Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "3556fa99-a509-4225-8dac-2a55c7171704", "user": "monitor"} 2025-09-30T13:28:19.151Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-12601", "name": "some-name", "reconcileID": "df253baf-df7f-4e81-ad79-6debc741c562", "user": "monitor"}    ... // 22 identical fields -  "234acd7af96d16f48e2a6d30deaa86db83d7dd00a0cac986e01608ecb47eb03a", +  "2e4d5e5053ec0befe87177e585d4b64885da294b14a8e1116b78fe891e758",    ... // 2 identical fields    ... // 2 identical fields    ... // 2 identical fields +  "3",    "3", -  "352e4d5e5053ec0befe87177e585d4b64885da294b14a8e1116b78fe891e7583",    ... // 3 identical elements    ... // 3 identical fields    ... // 3 identical fields    ... // 3 identical fields +  "4372eebb2b14c0c1f8f1492b7a3b6d6c9902c4460dacc6ca2100d669e79bf9", -  "44f2e2aa407ddaae0532735df38b5d540612cb01664cbb11669c4c8518499f",    ... 
// 4 identical fields    "5", +  "544f2e2aa407ddaae0532735df38b5d540612cb01664cbb11669c4c8518499f3", -  "544f2e2aa407ddaae0532735df38b5d540612cb01664cbb11669c4c8518499f3",    ... // 5 identical fields    ... // 5 identical fields    ... // 5 identical fields    ... // 6 identical fields    ... // 6 identical fields    ... // 7 identical fields    "8e", +  "8e4372eebb2b14c0c1f8f1492b7a3b6d6c9902c4460dacc6ca2100d669e79bf9", -  "8e4372eebb2b14c0c1f8f1492b7a3b6d6c9902c4460dacc6ca2100d669e79bf9", +  "8ebcf396945e5b811eb284a3efdf24a85a417e42841c212bdc20a35bfc9b6f76",    ... // 8 identical fields    ... // 9 identical fields    ... // 9 identical fields    AccessModes: nil,    ActiveDeadlineSeconds: nil,    Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},    Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}}, +  Annotations: map[string]string{ -  Annotations: map[string]string{    Annotations: map[string]string{ +  APIVersion: "", -  APIVersion: "apps/v1", -  APIVersion: "apps/v1", -  APIVersion: "v1",    Args: {"mysqld"},    Args: {"proxysql", "-f", "-c", "/etc/proxysql/proxysql.cnf", ...}, -  Args: []string{"logrotate"},    AutomountServiceAccountToken: nil, +  AvailableReplicas: 0, -  AvailableReplicas: 2, -  AvailableReplicas: 3,    AWSElasticBlockStore: nil,    AzureFile: nil, -  "b748d1f8cceb1729a8cfefe55d83d125aa5b078634fdab5276ba861a89334a34", -  "bcf396945e5b811eb284a3efdf24a85a417e42841c212bdc20a35bfc9b6f76", +  "c84a768389e6a48d66220f71cf8e982e0d648a115dbbe90a1461efb8b4e6ac53", -  "c84a768389e6a48d66220f71cf8e982e0d648a115dbbe90a1461efb8b4e6ac53",    Capacity: nil, -  CollisionCount: &0, +  CollisionCount: nil,    Conditions: nil,    ConfigMapKeyRef: nil,    ConfigMap: &v1.ConfigMapVolumeSource{    ContainerPort: 3306,    ContainerPort: 33060,    ContainerPort: 33062,    ContainerPort: 4444,    ContainerPort: 4567,    ContainerPort: 4568,    ContainerPort: 6032,    ContainerPort: 6070,    Containers: []v1.Container{ +  CreationTimestamp: v1.Time{}, -  CreationTimestamp: v1.Time{Time: s"2025-09-30 13:06:22 +0000 UTC"}, +  CurrentReplicas: 0, -  CurrentReplicas: 2, -  CurrentReplicas: 3, +  CurrentRevision: "", -  CurrentRevision: "some-name-proxysql-6488fdbb67", -  CurrentRevision: "some-name-proxysql-6b9d978f95", -  CurrentRevision: "some-name-proxysql-6c47f79d9b", -  CurrentRevision: "some-name-proxysql-74b8c748f5", -  CurrentRevision: "some-name-proxysql-85d6478b4b", -  CurrentRevision: "some-name-proxysql-85f9bbd74", -  CurrentRevision: "some-name-pxc-6557dbb7fc", -  CurrentRevision: "some-name-pxc-6b47768cd6", -  CurrentRevision: "some-name-pxc-775fc9cc8b", -  CurrentRevision: "some-name-pxc-9f4c977f5",    DataSource: nil,    DataSourceRef: nil, -  DefaultMode: &420, -  DefaultMode: &420, +  DefaultMode: nil, +  DefaultMode: nil,    DeletionGracePeriodSeconds: nil,    DeletionGracePeriodSeconds: nil,    DeletionTimestamp: nil, +  DeprecatedServiceAccount: "", -  
DeprecatedServiceAccount: "default", +  DNSPolicy: "", -  DNSPolicy: "ClusterFirst",    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-proxysql"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-pxc"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-log-collector"}, Optional: &true}}}, -  EnvFrom: []v1.EnvFromSource{{SecretRef: s"&SecretEnvSource{LocalObjectRefe"...}},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "OPERATOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "proxyadmin"}}}, ...},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, ...}, -  Env: []v1.EnvVar{    Env: []v1.EnvVar{    EphemeralContainers: nil,    FailureThreshold: 3,    FC: nil,    FieldPath: "metadata.name",    FieldPath: "metadata.namespace",    FieldRef: &v1.ObjectFieldSelector{ -  FieldsType: "FieldsV1", -  FieldsType: "FieldsV1", -  FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`..., -  FieldsV1: s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`...,    Finalizers: nil,    Finalizers: nil, +  Generation: 0, -  Generation: 1, -  Generation: 2, -  Generation: 3, -  Generation: 4, -  Generation: 5, -  Generation: 6, -  Generation: 7, -  Generation: 8, github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1    GitRepo: nil, /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:296 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:421 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:474 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:869    HostAliases: nil,    HostIP: "",    HostPort: 0, -  Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", -  Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", -  ImagePullPolicy: "Always",    ImagePullPolicy: "Always",    InitContainers: []v1.Container{    InitialDelaySeconds: 300,    ISCSI: nil,    Items: nil,    Items: nil,    "kubectl.kubernetes.io/default-container": "proxysql",    "kubectl.kubernetes.io/default-container": "pxc",    Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},    Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},    Labels: nil, +  "last-applied-secret": "234acd7af96d16f48e2a6d30deaa86db83d7dd00a0cac986e01608ecb47eb03a", +  "last-applied-secret": "b748d1f8cceb1729a8cfefe55d83d125aa5b078634fdab5276ba861a89334a34",    "last-applied-secret": 
strings.Join({    Lifecycle: nil,    LivenessProbe: &v1.Probe{    LocalObjectReference: {Name: "auto-some-name-pxc"},    LocalObjectReference: {Name: "some-name-pxc"}, +  ManagedFields: nil,    ManagedFields: nil, -  ManagedFields: []v1.ManagedFieldsEntry{ -  Manager: "kube-controller-manager", -  Manager: "percona-xtradb-cluster-operator",    MinReadySeconds: 0, [mysql] 2025/09/30 13:25:27 packets.go:58 read tcp 10.251.178.68:60600->34.118.230.66:3306: i/o timeout    Name: "auto-config",    {Name: "bin", VolumeSource: {EmptyDir: &{}}},    {Name: "CLUSTER_HASH", Value: "8265665"},    Name: "config",    Name: "DEFAULT_AUTHENTICATION_PLUGIN", -  {Name: "IS_LOGCOLLECTOR", Value: "yes"},    Name: "ist",    {Name: "LIVENESS_CHECK_TIMEOUT", Value: "5"}, -  {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"},    {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"}, -  Name: "logrotate", -  Name: "logs",    {Name: "MONITOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "monitor"}}}, -  {Name: "MONITOR_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...},    Name: "mysql",    Name: "mysql-admin",    Name: "mysql-init-file",    {Name: "MYSQL_NOTIFY_SOCKET", Value: "/var/lib/mysql/notify.sock"},    {Name: "MYSQL_STATE_FILE", Value: "/var/lib/mysql/mysql.state"},    Name: "mysql-users-secret-file",    Name: "mysqlx",    {Name: "OPERATOR_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}},    Name: "POD_NAME",    Name: "POD_NAMESPASE", -  {Name: "POD_NAMESPASE", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, -  {Name: "POD_NAME", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...},    Name: "proxyadm",    {Name: "READINESS_CHECK_TIMEOUT", Value: "15"}, -  {Name: "SERVICE_TYPE", Value: "mysql"},    Namespace: "users-12601",    Name: "ssl",    Name: "ssl-internal",    Name: "sst",    Name: "stats",    {Name: "tmp", VolumeSource: {EmptyDir: &{}}},    Name: "vault-keyring-secret",    Name: "write-set",    {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}},    NFS: nil,    NodeName: "",    NodeSelector: nil,    ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "last-applied-secret": "b748d1f8cceb1729a8cfefe55d83d125aa5b078634fdab5276ba861a89334a34", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}},    ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}},    ObjectMeta: {Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": 
"pxc", "last-applied-secret": "8ebcf396945e5b811eb284a3efdf24a85a417e42841c212bdc20a35bfc9b6f76", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}},    ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: {Name: "proxydata", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: v1.ObjectMeta{    ObjectMeta: v1.ObjectMeta{ +  ObservedGeneration: 0, -  ObservedGeneration: 1, -  ObservedGeneration: 2, -  ObservedGeneration: 3, -  ObservedGeneration: 4, -  ObservedGeneration: 5, -  ObservedGeneration: 6, -  ObservedGeneration: 7, -  ObservedGeneration: 8, -  Operation: "Update", -  Operation: "Update",    Optional: &false,    Optional: &true,    Optional: &true,    Ordinals: nil,    OS: nil,    Overhead: nil,    OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name", UID: "04100fa2-4353-418d-99a0-7c750a106563", ...}},    OwnerReferences: nil,    "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e",    "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMzUyZTRkNWU1MDUzZWMwYmVmZTg3MTc3ZTU4NWQ0YjY0ODg1ZGEyOTRiMTRhOGUxMTE2Yjc4ZmU4OTFlNzU4MyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMzUyZTRkNWU1MDUzZWMwYmVmZTg3MTc3ZTU4NWQ0YjY0ODg1ZGEyOTRiMTRhOGUxMTE2Yjc4ZmU4OTFlNzU4MyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNTQ0ZjJlMmFhNDA3ZGRhYWUwNTMyNzM1ZGYzOGI1ZDU0MDYxMmNiMDE2NjRjYmIxMTY2OWM0Yzg1MTg0OTlmMyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNTQ0ZjJlMmFhNDA3ZGRhYWUwNTMyNzM1ZGYzOGI1ZDU0MDYxMmNiMDE2NjRjYmIxMTY2OWM0Yzg1MTg0OTlmMyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOGU0MzcyZWViYjJiMTRjMGMxZjhmMTQ5MmI3YTNiNmQ2Yzk5MDJjNDQ2MGRhY2M2Y2EyMTAwZDY2OWU3OWJmOSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOGU0MzcyZWViYjJiMTRjMGMxZjhmMTQ5MmI3YTNiNmQ2Yzk5MDJjNDQ2MGRhY2M2Y2EyMTAwZDY2OWU3OWJmOSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOGViY2YzOTY5NDVlNWI4MTFlYjI4NGEzZWZkZjI0YTg1YTQxN2U0Mjg0MWMyMTJiZGMyMGEzNWJmYzliNmY3NiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYjc0OGQxZjhjY2ViMTcyOWE4Y2ZlZmU1NWQ4M2QxMjVhYTViMDc4NjM0ZmRhYjUyNzZiYTg2MWE4OTMzNGEzNCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYzg0YTc2ODM4OWU2YTQ4ZDY2MjIwZjcxY2Y4ZTk4MmUwZDY0OGExMTVkYmJlOTBhMTQ2MWVmYjhiNGU2YWM1MyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYzg0YTc2ODM4OWU2YTQ4ZDY2MjIwZjcxY2Y4ZTk4MmUwZDY0OGExMTVkYmJlOTBhMTQ2MWVmYjhiNGU2YWM1MyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMjM0YWNkN2FmOTZkMTZmNDhlMmE2ZDMwZGVhYTg2ZGI4M2Q3ZGQwMGEwY2FjOTg2ZTAxNjA4ZWNiNDdlYjAzYSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMjM0YWNkN2FmOTZkMTZmNDhlMmE2ZDMwZGVhYTg2ZGI4M2Q3ZGQwMGEwY2FjOTg2ZTAxNjA4ZWNiNDdlYjAzYSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNTQ0ZjJlMmFhNDA3ZGRhYWUwNTMyNzM1ZGYzOGI1ZDU0MDYxMmNiMDE2NjRjYmIxMTY2OWM0Yzg1MTg0OTlmMyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNTQ0ZjJlMmFhNDA3ZGRhYWUwNTMyNzM1ZGYzOGI1ZDU0MDYxMmNiMDE2NjRjYmIxMTY2OWM0Yzg1MTg0OTlmMyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOGU0MzcyZWViYjJiMTRjMGMxZjhmMTQ5MmI3YTNiNmQ2Yzk5MDJjNDQ2MGRhY2M2Y2EyMTAwZDY2OWU3OWJmOSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOGViY2YzOTY5NDVlNWI4MTFlYjI4NGEzZWZkZjI0YTg1YTQxN2U0Mjg0MWMyMTJiZGMyMGEzNWJmYzliNmY3NiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOGViY2YzOTY5NDVlNWI4MTFlYjI4NGEzZWZkZjI0YTg1YTQxN2U0Mjg0MWMyMTJiZGMyMGEzNWJmYzliNmY3NiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOGViY2YzOTY5NDVlNWI4MTFlYjI4NGEzZWZkZjI0YTg1YTQxN2U0Mjg0MWMyMTJiZGMyMGEzNWJmYzliNmY3NiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMTgyLWFmYWZmZjg4IiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUi
OiJsb2dzIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1sb2djb2xsZWN0b3IiLCJlbnZGcm9tIjpbeyJzZWNyZXRSZWYiOnsibmFtZSI6InNvbWUtbmFtZS1sb2ctY29sbGVjdG9yIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IkxPR19EQVRBX0RJUiIsInZhbHVlIjoiL3Zhci9saWIvbXlzcWwifSx7Im5hbWUiOiJQT0RfTkFNRVNQQVNFIiwidmFsdWVGcm9tIjp7ImZpZWxkUmVmIjp7ImZpZWxkUGF0aCI6Im1ldGFkYXRhLm5hbWVzcGFjZSJ9fX0seyJuYW1lIjoiUE9EX05BTUUiLCJ2YWx1ZUZyb20iOnsiZmllbGRSZWYiOnsiZmllbGRQYXRoIjoibWV0YWRhdGEubmFtZSJ9fX1dLCJyZXNvdXJjZXMiOnt9LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJkYXRhZGlyIiwibW91bnRQYXRoIjoiL3Zhci9saWIvbXlzcWwifV0sImltYWdlUHVsbFBvbGljeSI6IkFsd2F5cyJ9LHsibmFtZSI6ImxvZ3JvdGF0ZSIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOm1haW4tbG9nY29sbGVjdG9yIiwiYXJncyI6WyJsb2dyb3RhdGUiXSwiZW52IjpbeyJuYW1lIjoiU0VSVklDRV9UWVBFIiwidmFsdWUiOiJteXNxbCJ9LHsibmFtZSI6Ik1PTklUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJtb25pdG9yIn19fV0sInJlc291cmNlcyI6e30sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImRhdGFkaXIiLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHhjIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1weGM4LjAiLCJjb21tYW5kIjpbIi92YXIvbGliL215c3FsL3B4Yy1lbnRyeXBvaW50LnNoIl0sImFyZ3MiOlsibXlzcWxkIl0sInBvcnRzIjpbeyJuYW1lIjoibXlzcWwiLCJjb250YWluZXJQb3J0IjozMzA2fSx7Im5hbWUiOiJzc3QiLCJjb250YWluZXJQb3J0Ijo0NDQ0fSx7Im5hbWUiOiJ3cml0ZS1zZXQiLCJjb250YWluZXJQb3J0Ijo0NTY3fSx7Im5hbWUiOiJpc3QiLCJjb250YWluZXJQb3J0Ijo0NTY4fSx7Im5hbWUiOiJteXNxbC1hZG1pbiIsImNvbnRhaW5lclBvcnQiOjMzMDYyfSx7Im5h"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOGViY2YzOTY5NDVlNWI4MTFlYjI4NGEzZWZkZjI0YTg1YTQxN2U0Mjg0MWMyMTJiZGMyMGEzNWJmYzliNmY3NiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlc
m5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMTgyLWFmYWZmZjg4IiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXB4YzguMCIsImNvbW1hbmQiOlsiL3Zhci9saWIvbXlzcWwvcHhjLWVudHJ5cG9pbnQuc2giXSwiYXJncyI6WyJteXNxbGQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InNzdCIsImNvbnRhaW5lclBvcnQiOjQ0NDR9LHsibmFtZSI6IndyaXRlLXNldCIsImNvbnRhaW5lclBvcnQiOjQ1Njd9LHsibmFtZSI6ImlzdCIsImNvbnRhaW5lclBvcnQiOjQ1Njh9LHsibmFtZSI6Im15c3FsLWFkbWluIiwiY29udGFpbmVyUG9ydCI6MzMwNjJ9LHsibmFtZSI6Im15c3FseCIsImNvbnRhaW5lclBvcnQiOjMzMDYwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXB4YyIsIm9wdGlvbmFsIjp0cnVlfX1dLCJlbnYiOlt7Im5hbWUiOiJQWENfU0VSVklDRSIsInZhbHVlIjoic29tZS1uYW1lLXB4Yy11bnJlYWR5In0seyJuYW1lIjoiTU9OSVRPUl9IT1NUIiwidmFsdWUiOiIlIn0seyJuYW1lIjoiTVlTUUxfUk9PVF9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6InJvb3QifX19LHsibmFtZSI6IlhUUkFCQUNLVVBfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJ4dHJhYmFja3VwIn19fSx7Im5hbWUiOiJNT05JVE9SX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoibW9uaXRvciJ9fX0seyJuYW1lIjoiQ0xVU1RFUl9IQVNIIiwidmFsdWUiOiI4MjY1NjY1In0seyJuYW1lIjoiT1BFUkFUT1JfQURNSU5fUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiTElWRU5FU1NfQ0hFQ0tfVElNRU9VVCIsInZhbHVlIjoiNSJ9LHsibmFtZSI6IlJFQURJTkVTU19DSEVDS19USU1FT1VUIiwidmFsdWUiOiIxNSJ9LHsibmFtZSI6IkRFRkFVTFRfQVVUSEVOVElDQVRJT05fUExVR0lOIiwidmFsdWUiOiJjYWNoaW5nX3NoYTJfcGFzc3dvcmQifSx7Im5hbWUiOiJNWVNR"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNv"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYjc0OGQxZjhjY2ViMTcyOWE4Y2ZlZmU1NWQ4M2QxMjVhYTViMDc4NjM0ZmRhYjUyNzZiYTg2MWE4OTMzNGEzNCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMw"...,    "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a",    "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a",    "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc",    "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc", +  PeriodSeconds: 0, -  PeriodSeconds: 10, +  PersistentVolumeClaimRetentionPolicy: nil, -  PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", +  Phase: "", -  Phase: "Pending", +  PodManagementPolicy: "", -  PodManagementPolicy: "OrderedReady",    Ports: nil,    Ports: []v1.ContainerPort{    PreemptionPolicy: nil,    ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}}, +  Protocol: "", -  Protocol: "TCP",    Quobyte: nil,    ReadinessProbe: &{ProbeHandler: {Exec: &{Command: 
{"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...}, +  ReadyReplicas: 0, -  ReadyReplicas: 2, -  ReadyReplicas: 3, +  Replicas: 0, +  Replicas: &2, -  Replicas: 2, -  Replicas: &2,    Replicas: &2, +  Replicas: &3, -  Replicas: 3, -  Replicas: &3,    Replicas: &3,    ResizePolicy: nil,    ResourceFieldRef: nil,    Resources: {},    Resources: {Limits: {s"cpu": {i: {...}, s: "1", Format: "DecimalSI"}, s"memory": {i: {...}, s: "2G", Format: "DecimalSI"}}, Requests: {s"cpu": {i: {...}, s: "100m", Format: "DecimalSI"}, s"memory": {i: {...}, Format: "DecimalSI"}}}, +  ResourceVersion: "", -  ResourceVersion: "1759237621126895010", -  ResourceVersion: "1759237805897199020", -  ResourceVersion: "1759237931952751010", -  ResourceVersion: "1759237969587679010", -  ResourceVersion: "1759238080825103010", -  ResourceVersion: "1759238122683647020", -  ResourceVersion: "1759238169938047010", -  ResourceVersion: "1759238236030959010", -  ResourceVersion: "1759238291130799010", -  ResourceVersion: "1759238410452431020", -  ResourceVersion: "1759238443379615010", -  ResourceVersion: "1759238563776031020", -  ResourceVersion: "1759238746812591020", +  RestartPolicy: "", -  RestartPolicy: "Always", -  RevisionHistoryLimit: &10, +  RevisionHistoryLimit: nil, +  SchedulerName: "", -  SchedulerName: "default-scheduler",    SecretName: "internal-some-name",    SecretName: "some-name-mysql-init",    SecretName: "some-name-ssl",    SecretName: "some-name-ssl-internal",    SecretName: "some-name-vault",    Secret: &v1.SecretVolumeSource{    SecurityContext: nil,    Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    SelfLink: "",    ServiceAccountName: "default",    ServiceName: "some-name-proxysql-unready",    ServiceName: "some-name-pxc",    SetHostnameAsFQDN: nil, sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1    Spec: v1.PersistentVolumeClaimSpec{    Spec: v1.PodSpec{    Spec: v1.StatefulSetSpec{    StartupProbe: nil,    Status: v1.PersistentVolumeClaimStatus{    Status: v1.StatefulSetStatus{    StorageClassName: nil,    Subdomain: "", -  Subresource: "status",    SuccessThreshold: 1,    Template: v1.PodTemplateSpec{    TerminationGracePeriodSeconds: &30,    TerminationGracePeriodSeconds: &600,    TerminationGracePeriodSeconds: nil, +  TerminationMessagePath: "", -  TerminationMessagePath: "/dev/termination-log", +  TerminationMessagePolicy: "", -  TerminationMessagePolicy: "File",    TimeoutSeconds: 5, -  Time: s"2025-09-30 13:06:22 +0000 UTC", -  Time: s"2025-09-30 13:07:01 +0000 UTC", -  Time: s"2025-09-30 13:10:05 +0000 UTC", -  Time: s"2025-09-30 13:11:54 +0000 UTC", -  Time: s"2025-09-30 13:12:11 +0000 UTC", -  Time: s"2025-09-30 13:12:18 +0000 UTC", -  Time: s"2025-09-30 13:12:49 +0000 UTC", -  Time: s"2025-09-30 13:12:58 +0000 UTC", -  Time: s"2025-09-30 13:13:00 +0000 UTC", -  Time: s"2025-09-30 13:14:40 
+0000 UTC", -  Time: s"2025-09-30 13:15:22 +0000 UTC", -  Time: s"2025-09-30 13:15:34 +0000 UTC", -  Time: s"2025-09-30 13:16:09 +0000 UTC", -  Time: s"2025-09-30 13:17:02 +0000 UTC", -  Time: s"2025-09-30 13:17:16 +0000 UTC", -  Time: s"2025-09-30 13:17:46 +0000 UTC", -  Time: s"2025-09-30 13:18:11 +0000 UTC", -  Time: s"2025-09-30 13:20:10 +0000 UTC", -  Time: s"2025-09-30 13:20:19 +0000 UTC", -  Time: s"2025-09-30 13:20:43 +0000 UTC", -  Time: s"2025-09-30 13:22:28 +0000 UTC", -  Time: s"2025-09-30 13:22:43 +0000 UTC", -  Time: s"2025-09-30 13:22:45 +0000 UTC", -  Time: s"2025-09-30 13:25:46 +0000 UTC",    Tolerations: nil, -  TopologySpreadConstraints: nil, +  TopologySpreadConstraints: []v1.TopologySpreadConstraint{},    TypeMeta: {},    TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"}, +  UID: "", -  UID: "7c480c59-bc4f-485d-aa19-0d15b9e5932d", -  UID: "956e2f97-b157-4f51-9bb0-34d14b2b1a74", +  UpdatedReplicas: 0, -  UpdatedReplicas: 1, -  UpdatedReplicas: 2, -  UpdatedReplicas: 3, +  UpdateRevision: "", -  UpdateRevision: "some-name-proxysql-6488fdbb67", -  UpdateRevision: "some-name-proxysql-6b9d978f95", -  UpdateRevision: "some-name-proxysql-6c47f79d9b", -  UpdateRevision: "some-name-proxysql-74b8c748f5", -  UpdateRevision: "some-name-proxysql-85d6478b4b", -  UpdateRevision: "some-name-proxysql-85f9bbd74", -  UpdateRevision: "some-name-pxc-6557dbb7fc", -  UpdateRevision: "some-name-pxc-6b47768cd6", -  UpdateRevision: "some-name-pxc-775fc9cc8b", -  UpdateRevision: "some-name-pxc-7f8fb47c69", -  UpdateRevision: "some-name-pxc-9f4c977f5",    UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}},   &v1.StatefulSet{    Value: "", +  Value: "caching_sha2_password",    ValueFrom: nil,    ValueFrom: &v1.EnvVarSource{ -  Value: "mysql_native_password",    VolumeAttributesClassName: nil,    VolumeClaimTemplates: []v1.PersistentVolumeClaim{    VolumeDevices: nil, -  VolumeMode: &"Filesystem", +  VolumeMode: nil,    VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...}, -  VolumeMounts: []v1.VolumeMount{{Name: "datadir", MountPath: "/var/lib/mysql"}},    VolumeName: "",    VolumeSource: v1.VolumeSource{    Volumes: []v1.Volume{    VsphereVolume: nil,    WorkingDir: "", + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl get pxc --all-namespaces -o wide + kubectl patch pxc -n users-12601 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.P6wRqFCyO3 ++ mktemp + local LAST_ERR=/tmp/tmp.lkQqwYx8GK + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.P6wRqFCyO3 perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-12601 namespace + cat /tmp/tmp.lkQqwYx8GK + rm /tmp/tmp.P6wRqFCyO3 /tmp/tmp.lkQqwYx8GK + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.UQkN3B8x4p ++ mktemp + local LAST_ERR=/tmp/tmp.Q7YS2BIXF5 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.UQkN3B8x4p No resources 
found + cat /tmp/tmp.Q7YS2BIXF5 + rm /tmp/tmp.UQkN3B8x4p /tmp/tmp.Q7YS2BIXF5 + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.gave50nq9k ++ mktemp + local LAST_ERR=/tmp/tmp.vb1CfEOw7t + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.gave50nq9k No resources found + cat /tmp/tmp.vb1CfEOw7t + rm /tmp/tmp.gave50nq9k /tmp/tmp.vb1CfEOw7t + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.5NgDMZV5u1 ++ mktemp + local LAST_ERR=/tmp/tmp.Z5fvjiyK56 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5NgDMZV5u1 validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.Z5fvjiyK56 + rm /tmp/tmp.5NgDMZV5u1 /tmp/tmp.Z5fvjiyK56 + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-12601 + rm -rf /tmp/tmp.Y8WkxBbrUi + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.N6wwxDs7wD + local LAST_OUT=/tmp/tmp.kscEB5Tqjb ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.9YYWq5pp9J + local exit_status=0 + local LAST_ERR=/tmp/tmp.mFeU5tFMaS + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete --grace-period=0 --force=true namespace users-12601 + for i in $(seq 0 2) + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator
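The teardown traced above condenses to the following sequence (a sketch only, without the kubectl_bin retry/tempfile wrappers used by the harness): finalizers are cleared on every PerconaXtraDBCluster so deletion cannot hang on a finalizer the operator can no longer process, the custom resources and the admission webhook are removed, and the test and operator namespaces are force-deleted.

# strip finalizers from each pxc resource ($0 = namespace, $1 = name from the get output)
kubectl get pxc --all-namespaces -o wide | grep -v NAMESPACE \
  | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
# delete the custom resources everywhere
kubectl delete pxc --all --all-namespaces
kubectl delete pxc-backup --all --all-namespaces
kubectl delete pxc-restore --all --all-namespaces
# drop the operator's admission webhook and cert-manager, then force-remove the namespaces of this run
kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
kubectl delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml
kubectl delete --grace-period=0 --force=true namespace users-12601
kubectl delete --grace-period=0 --force=true namespace pxc-operator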