Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/logs/users-8-0.log Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 + create_infra users-9213 + local ns=users-9213 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-9084 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.KdQYxUgQ2E ++ mktemp + local LAST_ERR=/tmp/tmp.TX7MdAq2BB + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.KdQYxUgQ2E perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-9084 namespace + cat /tmp/tmp.TX7MdAq2BB + rm /tmp/tmp.KdQYxUgQ2E /tmp/tmp.TX7MdAq2BB + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.a8EA9shguw ++ mktemp + local LAST_ERR=/tmp/tmp.fUGbMNyEOh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.a8EA9shguw No resources found + cat /tmp/tmp.fUGbMNyEOh + rm /tmp/tmp.a8EA9shguw /tmp/tmp.fUGbMNyEOh + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.kxjLYKSbij ++ mktemp + local LAST_ERR=/tmp/tmp.J6KvqDwmsQ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.kxjLYKSbij No resources found + cat /tmp/tmp.J6KvqDwmsQ + rm /tmp/tmp.kxjLYKSbij /tmp/tmp.J6KvqDwmsQ + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get clusterrole + timeout 30 kubectl delete clusterrole error: resource(s) were provided, 
but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp ++ mktemp + xargs kubectl delete ns + local LAST_OUT=/tmp/tmp.46gAkBuvY0 + local LAST_OUT=/tmp/tmp.e4lBSF0ye6 ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.4vQQ14hfVn + local exit_status=0 + local LAST_ERR=/tmp/tmp.gnayD9B7Og + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.e4lBSF0ye6 + cat /tmp/tmp.gnayD9B7Og + rm /tmp/tmp.e4lBSF0ye6 /tmp/tmp.gnayD9B7Og + return 0 namespace "users-9084" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.46gAkBuvY0 namespace "pxc-operator" deleted + cat /tmp/tmp.4vQQ14hfVn + rm /tmp/tmp.46gAkBuvY0 /tmp/tmp.4vQQ14hfVn + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.nagtNMy7Xd ++ mktemp + local LAST_ERR=/tmp/tmp.BCNFrB9Pe8 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.nagtNMy7Xd namespace/pxc-operator created + cat /tmp/tmp.BCNFrB9Pe8 + rm /tmp/tmp.nagtNMy7Xd /tmp/tmp.BCNFrB9Pe8 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.MFwUwvFAP8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.puBYfPFcy0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MFwUwvFAP8 ++ cat /tmp/tmp.puBYfPFcy0 ++ rm /tmp/tmp.MFwUwvFAP8 /tmp/tmp.puBYfPFcy0 ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2076-ee1be0ba-8-cluster8 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.s5LRSCGXia ++ mktemp + local LAST_ERR=/tmp/tmp.7tN6lUJmIx + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2076-ee1be0ba-8-cluster8 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.s5LRSCGXia Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2076-ee1be0ba-8-cluster8" modified. 
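Every kubectl invocation above runs through the harness's kubectl_bin wrapper, whose expansion dominates this trace: stdout and stderr are captured into mktemp files, the command is retried up to three times, and the captured streams are replayed before the temp files are removed. Below is a minimal reconstruction of that wrapper inferred from the expanded trace; the real helper in the test harness may differ in detail (the trace only ever shows "sleep 0" between attempts, so no real back-off is assumed here).

# Minimal reconstruction of kubectl_bin, based on the expanded trace above.
kubectl_bin() {
    local LAST_OUT LAST_ERR
    local exit_status=0
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do      # up to three attempts, as in the trace
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ "$exit_status" != 0 ]; then
            sleep 0              # the trace shows no real delay between retries
            continue
        fi
        break
    done
    cat "$LAST_OUT"              # replay captured stdout
    cat "$LAST_ERR"              # replay captured stderr
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}

This is why transient errors later in the log (for example the 'Error from server (NotFound): namespaces "users-9213" not found' response while deleting the namespace) are attempted three times and the non-zero return is then swallowed by the caller with a no-op ("+ :").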
+ cat /tmp/tmp.7tN6lUJmIx + rm /tmp/tmp.s5LRSCGXia /tmp/tmp.7tN6lUJmIx + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.cTtYGA3m7X ++ mktemp + local LAST_ERR=/tmp/tmp.WJDOf8b0dR + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.cTtYGA3m7X customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.WJDOf8b0dR + rm /tmp/tmp.cTtYGA3m7X /tmp/tmp.WJDOf8b0dR + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/deploy/cw-rbac.yaml + kubectl_bin apply -f - + sed -e 's^namespace: .*^namespace: pxc-operator^' ++ mktemp + local LAST_OUT=/tmp/tmp.UP4GqFqcbK ++ mktemp + local LAST_ERR=/tmp/tmp.znJKV6EcBe + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.UP4GqFqcbK clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.znJKV6EcBe + rm /tmp/tmp.UP4GqFqcbK /tmp/tmp.znJKV6EcBe + return 0 + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2076-ee1be0ba^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/deploy/cw-operator.yaml + kubectl_bin apply -f - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - ++ mktemp + local LAST_OUT=/tmp/tmp.hmYadx4gNx ++ mktemp + local LAST_ERR=/tmp/tmp.ITSzK3kU9C + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hmYadx4gNx deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.ITSzK3kU9C + rm /tmp/tmp.hmYadx4gNx /tmp/tmp.ITSzK3kU9C + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.WElPu4P1jh ++ mktemp + local LAST_ERR=/tmp/tmp.Q1FoKYExjy + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.WElPu4P1jh pod/percona-xtradb-cluster-operator-84658c6c47-s6jzj condition met + cat /tmp/tmp.Q1FoKYExjy + rm /tmp/tmp.WElPu4P1jh /tmp/tmp.Q1FoKYExjy + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.cbBAdsIoWT +++ mktemp ++ local LAST_ERR=/tmp/tmp.IYCCPjNhTf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cbBAdsIoWT ++ cat /tmp/tmp.IYCCPjNhTf ++ rm /tmp/tmp.cbBAdsIoWT /tmp/tmp.IYCCPjNhTf ++ return 0 + wait_pod percona-xtradb-cluster-operator-84658c6c47-s6jzj 480 pxc-operator + local pod=percona-xtradb-cluster-operator-84658c6c47-s6jzj + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-84658c6c47-s6jzj ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-84658c6c47-s6jzj condition met waiting for pod/percona-xtradb-cluster-operator-84658c6c47-s6jzj to become Ready.Ok + sleep 3 + create_namespace users-9213 + local namespace=users-9213 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep validate-auth ++ awk '{print $1}' ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + grep -E -v 
'^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces users-9213' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-9213 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-9213 + kubectl_bin get ns ++ mktemp + xargs kubectl delete ns ++ mktemp + local LAST_OUT=/tmp/tmp.pDEwq14EgW ++ mktemp + local LAST_OUT=/tmp/tmp.74rlgo66GG ++ mktemp + local LAST_ERR=/tmp/tmp.DHZoWkKDfg + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.WzpEsJJmjl + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-9213 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-9213 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.pDEwq14EgW + cat /tmp/tmp.DHZoWkKDfg + rm /tmp/tmp.pDEwq14EgW /tmp/tmp.DHZoWkKDfg + return 0 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-9213 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.74rlgo66GG + cat /tmp/tmp.WzpEsJJmjl Error from server (NotFound): namespaces "users-9213" not found + rm /tmp/tmp.74rlgo66GG /tmp/tmp.WzpEsJJmjl + return 1 + : + wait_for_delete namespace/users-9213 + local res=namespace/users-9213 + echo -n 'waiting for namespace/users-9213 to be deleted' waiting for namespace/users-9213 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "users-9213" not found + desc 'create namespace users-9213' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-9213 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-9213 ++ mktemp + local LAST_OUT=/tmp/tmp.NhKAql6BoA ++ mktemp + local LAST_ERR=/tmp/tmp.EXX4icnoYI + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-9213 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.NhKAql6BoA namespace/users-9213 created + cat /tmp/tmp.EXX4icnoYI + rm /tmp/tmp.NhKAql6BoA /tmp/tmp.EXX4icnoYI + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.HC8Bj30nCq +++ mktemp ++ local LAST_ERR=/tmp/tmp.fkdNreiEpm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HC8Bj30nCq ++ cat /tmp/tmp.fkdNreiEpm ++ rm /tmp/tmp.HC8Bj30nCq /tmp/tmp.fkdNreiEpm ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2076-ee1be0ba-8-cluster8 --namespace=users-9213 ++ mktemp + local LAST_OUT=/tmp/tmp.ZrBh8XPFlp ++ mktemp + local LAST_ERR=/tmp/tmp.yfyjWeKYb4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2076-ee1be0ba-8-cluster8 --namespace=users-9213 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ZrBh8XPFlp Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2076-ee1be0ba-8-cluster8" modified. 
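None of the cluster manifests applied in the steps that follow are used verbatim: the cat_config helper streams each YAML file through a chain of sed substitutions that pins every image reference to the build under test and fills in the namespace placeholder, and the result is piped to kubectl apply -f -. A condensed sketch of that pipeline, reconstructed from the expanded trace below (the real helper chains more rules, e.g. for the pmm, haproxy, backup and logcollector images, all visible in the trace):

# Condensed reconstruction of cat_config; each sed rule pins one image
# family, and the last one substitutes the test namespace into the
# minio-service placeholder.
cat_config() {
    local input_file=$1
    cat "$input_file" \
        | sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
        | sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2076-ee1be0ba#' \
        | sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' \
        | sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' \
        | sed -e 's~minio-service.#namespace~minio-service.users-9213~'
}

cat_config e2e-tests/conf/some-name.yml | kubectl apply -f -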
+ cat /tmp/tmp.yfyjWeKYb4 + rm /tmp/tmp.ZrBh8XPFlp /tmp/tmp.yfyjWeKYb4 + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.p3gphxkVXQ ++ mktemp + local LAST_ERR=/tmp/tmp.eE3IrvFB8K + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.p3gphxkVXQ secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.eE3IrvFB8K + rm /tmp/tmp.p3gphxkVXQ /tmp/tmp.eE3IrvFB8K + return 0 + desc 'create PXC cluster with 1-password secret' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster with 1-password secret ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/conf/some-name.yml '' '' /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/conf/secrets_one_pass.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/conf/secrets_one_pass.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/conf/secrets_one_pass.yml ++ mktemp + local LAST_OUT=/tmp/tmp.7tb2V7npba ++ mktemp + local LAST_ERR=/tmp/tmp.USaMnCg9gt + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/conf/secrets_one_pass.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7tb2V7npba secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.USaMnCg9gt + rm /tmp/tmp.7tb2V7npba /tmp/tmp.USaMnCg9gt + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/conf/client.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/conf/client.yml + local pvc_name= + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/conf/client.yml '' + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/conf/client.yml + local pvc_name= + kubectl_bin apply -f - + cat 
/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#' + local LAST_OUT=/tmp/tmp.d48SLT43xz + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2076-ee1be0ba#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-9213~ ++ mktemp + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' + local LAST_ERR=/tmp/tmp.dIUR7gCS3H + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.d48SLT43xz deployment.apps/pxc-client created + cat /tmp/tmp.dIUR7gCS3H + rm /tmp/tmp.d48SLT43xz /tmp/tmp.dIUR7gCS3H + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/conf/some-name.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/conf/some-name.yml + local pvc_name= + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/conf/some-name.yml '' + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/conf/some-name.yml + local pvc_name= ++ mktemp + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.wWKyao7tUN + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2076-ee1be0ba#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-9213~ + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_ERR=/tmp/tmp.RR6zlTHyFX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.wWKyao7tUN 
perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.RR6zlTHyFX + rm /tmp/tmp.wWKyao7tUN /tmp/tmp.RR6zlTHyFX + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.q7kdTUendN ++++ mktemp +++ local LAST_ERR=/tmp/tmp.XLIY2oqKkW +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.q7kdTUendN +++ cat /tmp/tmp.XLIY2oqKkW +++ rm /tmp/tmp.q7kdTUendN /tmp/tmp.XLIY2oqKkW +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.tXjN7T1OWl ++++ mktemp +++ local LAST_ERR=/tmp/tmp.FaWuWb9YSg +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.tXjN7T1OWl +++ cat /tmp/tmp.FaWuWb9YSg +++ rm /tmp/tmp.tXjN7T1OWl /tmp/tmp.FaWuWb9YSg +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-9213 ++ mktemp + local LAST_OUT=/tmp/tmp.OroNvYGhI6 ++ mktemp + local LAST_ERR=/tmp/tmp.bAPzWjhNVX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-9213 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-9213 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-9213 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.OroNvYGhI6 + cat /tmp/tmp.bAPzWjhNVX error: no matching resources found + rm /tmp/tmp.OroNvYGhI6 /tmp/tmp.bAPzWjhNVX + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo some-name-proxysql-0 ++ grep -E '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for 
pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.ECjM4eVWxI +++ mktemp ++ local LAST_ERR=/tmp/tmp.ez2AJooi4a ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ECjM4eVWxI ++ cat /tmp/tmp.ez2AJooi4a ++ rm /tmp/tmp.ECjM4eVWxI /tmp/tmp.ez2AJooi4a ++ return 0 + local 'root_pass=cqRIu]nCVaQP{r}y' + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''cqRIu]nCVaQP{r}y'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -p'\''cqRIu]nCVaQP{r}y'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tWVuHeGMaW +++ mktemp ++ local LAST_ERR=/tmp/tmp.WzSn4pFTj4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tWVuHeGMaW ++ cat /tmp/tmp.WzSn4pFTj4 ++ rm /tmp/tmp.tWVuHeGMaW /tmp/tmp.WzSn4pFTj4 ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + 
local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''cqRIu]nCVaQP{r}y'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -p'\''cqRIu]nCVaQP{r}y'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mfDfoBoSzt +++ mktemp ++ local LAST_ERR=/tmp/tmp.3caanOMvHW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mfDfoBoSzt ++ cat /tmp/tmp.3caanOMvHW ++ rm /tmp/tmp.mfDfoBoSzt /tmp/tmp.3caanOMvHW ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''cqRIu]nCVaQP{r}y'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''cqRIu]nCVaQP{r}y'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''cqRIu]nCVaQP{r}y'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''cqRIu]nCVaQP{r}y'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LUkaFLI5EY +++ mktemp ++ local LAST_ERR=/tmp/tmp.QIBgj4eu3c ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LUkaFLI5EY ++ cat /tmp/tmp.QIBgj4eu3c ++ rm /tmp/tmp.LUkaFLI5EY /tmp/tmp.QIBgj4eu3c ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met 
waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.tJZuaBgojY/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.tJZuaBgojY/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-1.sql /tmp/tmp.tJZuaBgojY/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''cqRIu]nCVaQP{r}y'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''cqRIu]nCVaQP{r}y'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''cqRIu]nCVaQP{r}y'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''cqRIu]nCVaQP{r}y'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mQP8wGe805 +++ mktemp ++ local LAST_ERR=/tmp/tmp.cmd2UeHQuQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mQP8wGe805 ++ cat /tmp/tmp.cmd2UeHQuQ ++ rm /tmp/tmp.mQP8wGe805 /tmp/tmp.cmd2UeHQuQ ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.tJZuaBgojY/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.tJZuaBgojY/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-1.sql /tmp/tmp.tJZuaBgojY/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''cqRIu]nCVaQP{r}y'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''cqRIu]nCVaQP{r}y'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''cqRIu]nCVaQP{r}y'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''cqRIu]nCVaQP{r}y'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XcAJZHCVmE +++ mktemp ++ local LAST_ERR=/tmp/tmp.WRGV4vEnLM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XcAJZHCVmE ++ cat /tmp/tmp.WRGV4vEnLM ++ rm /tmp/tmp.XcAJZHCVmE /tmp/tmp.WRGV4vEnLM ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.tJZuaBgojY/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.tJZuaBgojY/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-1.sql /tmp/tmp.tJZuaBgojY/select-1.sql + is_keyring_plugin_in_use some-name + local cluster=some-name + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' + grep -E -o 'early-plugin-load=keyring_\w+.so' ++ mktemp + local LAST_OUT=/tmp/tmp.EooyZXWMQF ++ mktemp + local LAST_ERR=/tmp/tmp.a1nzJg2fSu + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.EooyZXWMQF + cat /tmp/tmp.a1nzJg2fSu Unable to use a TTY - input is not a terminal or the right kind of file + rm /tmp/tmp.EooyZXWMQF /tmp/tmp.a1nzJg2fSu + return 0 + return 1 + desc 'test missing passwords were created and present in internal secrets' + set +o xtrace ----------------------------------------------------------------------------------- test missing passwords were created and present in internal secrets ----------------------------------------------------------------------------------- + empty_pwds=() + wrong_pwds=() + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking root' Checking root ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.UFXPK0YDzi +++ mktemp ++ local LAST_ERR=/tmp/tmp.gVUgOm5ENU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UFXPK0YDzi ++ cat /tmp/tmp.gVUgOm5ENU ++ rm /tmp/tmp.UFXPK0YDzi /tmp/tmp.gVUgOm5ENU ++ return 0 + secret_pass='cqRIu]nCVaQP{r}y' ++ getSecretData internal-some-name root ++ local secretName=internal-some-name ++ local dataKey=root ++ kubectl_bin get secrets/internal-some-name '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.BAjBwIHY7G +++ mktemp ++ local LAST_ERR=/tmp/tmp.nDeDdaChIQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BAjBwIHY7G ++ cat /tmp/tmp.nDeDdaChIQ ++ rm /tmp/tmp.BAjBwIHY7G /tmp/tmp.nDeDdaChIQ ++ return 0 + int_secret_pass='cqRIu]nCVaQP{r}y' + [[ -z cqRIu]nCVaQP{r}y ]] + [[ cqRIu]nCVaQP{r}y != \c\q\R\I\u\]\n\C\V\a\Q\P\{\r\}\y ]] + [[ root != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ root ]] + [[ '' =~ root ]] + echo 'Running compare for root' Running compare for root + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''cqRIu]nCVaQP{r}y'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''cqRIu]nCVaQP{r}y'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f 
/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''cqRIu]nCVaQP{r}y'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''cqRIu]nCVaQP{r}y'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SMnlFlUBcl +++ mktemp ++ local LAST_ERR=/tmp/tmp.F9Tw6V7eqi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SMnlFlUBcl ++ cat /tmp/tmp.F9Tw6V7eqi ++ rm /tmp/tmp.SMnlFlUBcl /tmp/tmp.F9Tw6V7eqi ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.tJZuaBgojY/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.tJZuaBgojY/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.tJZuaBgojY/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking xtrabackup' Checking xtrabackup ++ getSecretData my-cluster-secrets xtrabackup ++ local secretName=my-cluster-secrets ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.BBouzMXSUz +++ mktemp ++ local LAST_ERR=/tmp/tmp.EeAhHMX9e2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BBouzMXSUz ++ cat /tmp/tmp.EeAhHMX9e2 ++ rm /tmp/tmp.BBouzMXSUz /tmp/tmp.EeAhHMX9e2 ++ return 0 + secret_pass='7a*h}ozBxsSRkn>h.%w' ++ getSecretData internal-some-name xtrabackup ++ local secretName=internal-some-name ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.P5qMAQnm1H +++ mktemp ++ local LAST_ERR=/tmp/tmp.LuLj3bJ9jb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.P5qMAQnm1H ++ cat /tmp/tmp.LuLj3bJ9jb ++ rm /tmp/tmp.P5qMAQnm1H /tmp/tmp.LuLj3bJ9jb ++ return 0 + int_secret_pass='7a*h}ozBxsSRkn>h.%w' + [[ -z 7a*h}ozBxsSRkn>h.%w ]] + [[ 7a*h}ozBxsSRkn>h.%w != \7\a\*\h\}\o\z\B\x\s\S\R\k\n\>\h\.\%\w ]] + [[ xtrabackup != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ xtrabackup ]] + [[ '' =~ xtrabackup ]] + echo 'Running compare for xtrabackup' Running compare for xtrabackup + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''7a*h}ozBxsSRkn>h.%w'\''' + local command_id=select-4 + local 
'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''7a*h}ozBxsSRkn>h.%w'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''7a*h}ozBxsSRkn>h.%w'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''7a*h}ozBxsSRkn>h.%w'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VzjIBLV6Pv +++ mktemp ++ local LAST_ERR=/tmp/tmp.RBoSIh8wte ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VzjIBLV6Pv ++ cat /tmp/tmp.RBoSIh8wte ++ rm /tmp/tmp.VzjIBLV6Pv /tmp/tmp.RBoSIh8wte ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.tJZuaBgojY/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.tJZuaBgojY/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.tJZuaBgojY/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking monitor' Checking monitor ++ getSecretData my-cluster-secrets monitor ++ local secretName=my-cluster-secrets ++ local dataKey=monitor ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.9A6ml63rVZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.VoCNQTHnqX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9A6ml63rVZ ++ cat /tmp/tmp.VoCNQTHnqX ++ rm /tmp/tmp.9A6ml63rVZ /tmp/tmp.VoCNQTHnqX ++ return 0 + secret_pass=monitor_password ++ getSecretData internal-some-name monitor ++ local secretName=internal-some-name ++ local dataKey=monitor ++ kubectl_bin get secrets/internal-some-name '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.WdW1YMgXlS +++ mktemp ++ local LAST_ERR=/tmp/tmp.bIKTERQJSK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WdW1YMgXlS ++ cat /tmp/tmp.bIKTERQJSK ++ rm /tmp/tmp.WdW1YMgXlS /tmp/tmp.bIKTERQJSK ++ return 0 + int_secret_pass=monitor_password + [[ -z monitor_password ]] + [[ monitor_password != \m\o\n\i\t\o\r\_\p\a\s\s\w\o\r\d ]] + [[ monitor != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ monitor ]] + [[ '' =~ monitor ]] + echo 'Running compare for monitor' Running compare for monitor + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Yct0p22n8r +++ mktemp ++ local LAST_ERR=/tmp/tmp.1gKZ236cjY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Yct0p22n8r ++ cat /tmp/tmp.1gKZ236cjY ++ rm /tmp/tmp.Yct0p22n8r /tmp/tmp.1gKZ236cjY ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ 
/usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.tJZuaBgojY/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.tJZuaBgojY/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.tJZuaBgojY/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking proxyadmin' Checking proxyadmin ++ getSecretData my-cluster-secrets proxyadmin ++ local secretName=my-cluster-secrets ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.G5bKTWLXgF +++ mktemp ++ local LAST_ERR=/tmp/tmp.ruXS7ElzP6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.G5bKTWLXgF ++ cat /tmp/tmp.ruXS7ElzP6 ++ rm /tmp/tmp.G5bKTWLXgF /tmp/tmp.ruXS7ElzP6 ++ return 0 + secret_pass='d^&C83E6t.TN,.{N' ++ getSecretData internal-some-name proxyadmin ++ local secretName=internal-some-name ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.AHnEmRCgVH +++ mktemp ++ local LAST_ERR=/tmp/tmp.PkJKAb9UEN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AHnEmRCgVH ++ cat /tmp/tmp.PkJKAb9UEN ++ rm /tmp/tmp.AHnEmRCgVH /tmp/tmp.PkJKAb9UEN ++ return 0 + int_secret_pass='d^&C83E6t.TN,.{N' + [[ -z d^&C83E6t.TN,.{N ]] + [[ d^&C83E6t.TN,.{N != \d\^\&\C\8\3\E\6\t\.\T\N\,\.\{\N ]] + [[ proxyadmin != \p\r\o\x\y\a\d\m\i\n ]] + [[ proxyadmin == \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ proxyadmin ]] + [[ '' =~ proxyadmin ]] + echo 'Running compare for proxyadmin' Running compare for proxyadmin + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''d^&C83E6t.TN,.{N'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''d^&C83E6t.TN,.{N'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''d^&C83E6t.TN,.{N'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''d^&C83E6t.TN,.{N'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.tJZuaBgojY/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-2.sql /tmp/tmp.tJZuaBgojY/select-2.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking operator' Checking operator ++ getSecretData my-cluster-secrets operator ++ local secretName=my-cluster-secrets ++ local dataKey=operator ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.0GCOQBBRdu +++ mktemp ++ local LAST_ERR=/tmp/tmp.4qi6Kdd05d ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0GCOQBBRdu ++ cat /tmp/tmp.4qi6Kdd05d ++ rm /tmp/tmp.0GCOQBBRdu /tmp/tmp.4qi6Kdd05d ++ return 0 + secret_pass=xxFo90c8SkdrRpHU ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.jVbRA33a4S +++ mktemp ++ local LAST_ERR=/tmp/tmp.OsRPlplKJI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jVbRA33a4S ++ cat /tmp/tmp.OsRPlplKJI ++ rm /tmp/tmp.jVbRA33a4S /tmp/tmp.OsRPlplKJI ++ return 0 + int_secret_pass=xxFo90c8SkdrRpHU + [[ -z xxFo90c8SkdrRpHU ]] + [[ xxFo90c8SkdrRpHU != \x\x\F\o\9\0\c\8\S\k\d\r\R\p\H\U ]] + [[ operator != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ operator ]] + [[ '' =~ operator ]] + echo 'Running compare for operator' Running compare for operator + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''xxFo90c8SkdrRpHU'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''xxFo90c8SkdrRpHU'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''xxFo90c8SkdrRpHU'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''xxFo90c8SkdrRpHU'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.imjbcwWHUb +++ mktemp ++ local LAST_ERR=/tmp/tmp.xL3yI0MHLz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.imjbcwWHUb ++ cat /tmp/tmp.xL3yI0MHLz ++ rm /tmp/tmp.imjbcwWHUb /tmp/tmp.xL3yI0MHLz ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E 
'^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.tJZuaBgojY/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.tJZuaBgojY/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.tJZuaBgojY/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking replication' Checking replication ++ getSecretData my-cluster-secrets replication ++ local secretName=my-cluster-secrets ++ local dataKey=replication ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.JsnEZVY3DC +++ mktemp ++ local LAST_ERR=/tmp/tmp.DKP17MtaYC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JsnEZVY3DC ++ cat /tmp/tmp.DKP17MtaYC ++ rm /tmp/tmp.JsnEZVY3DC /tmp/tmp.DKP17MtaYC ++ return 0 + secret_pass='I$ua5hL{hqO[X=8@8H>' ++ getSecretData internal-some-name replication ++ local secretName=internal-some-name ++ local dataKey=replication ++ kubectl_bin get secrets/internal-some-name '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.tol3sBhO8Z +++ mktemp ++ local LAST_ERR=/tmp/tmp.RZtu0hxHZE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tol3sBhO8Z ++ cat /tmp/tmp.RZtu0hxHZE ++ rm /tmp/tmp.tol3sBhO8Z /tmp/tmp.RZtu0hxHZE ++ return 0 + int_secret_pass='I$ua5hL{hqO[X=8@8H>' + [[ -z I$ua5hL{hqO[X=8@8H> ]] + [[ I$ua5hL{hqO[X=8@8H> != \I\$\u\a\5\h\L\{\h\q\O\[\X\=\8\@\8\H\> ]] + [[ replication != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ replication ]] + [[ '' =~ replication ]] + echo 'Running compare for replication' Running compare for replication + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''I$ua5hL{hqO[X=8@8H>'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''I$ua5hL{hqO[X=8@8H>'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''I$ua5hL{hqO[X=8@8H>'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''I$ua5hL{hqO[X=8@8H>'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lA24QruBA8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.VaXwAUXD9r ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ 
exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lA24QruBA8 ++ cat /tmp/tmp.VaXwAUXD9r ++ rm /tmp/tmp.lA24QruBA8 /tmp/tmp.VaXwAUXD9r ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.tJZuaBgojY/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.tJZuaBgojY/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.tJZuaBgojY/select-4.sql + [[ -n '' ]] + [[ -n '' ]] + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.VFIoR6UXSd ++ mktemp + local LAST_ERR=/tmp/tmp.VmsKaohpE3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.VFIoR6UXSd secret/my-cluster-secrets patched + cat /tmp/tmp.VmsKaohpE3 + rm /tmp/tmp.VFIoR6UXSd /tmp/tmp.VmsKaohpE3 + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.b6xbylmbHY +++ mktemp ++ local LAST_ERR=/tmp/tmp.2EFYG5o3eW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.b6xbylmbHY ++ cat /tmp/tmp.2EFYG5o3eW ++ rm /tmp/tmp.b6xbylmbHY /tmp/tmp.2EFYG5o3eW ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + 
local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.tJZuaBgojY/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.tJZuaBgojY/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.tJZuaBgojY/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.jBtaMksWAD ++ mktemp + local LAST_ERR=/tmp/tmp.hood7lbTrG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jBtaMksWAD perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.hood7lbTrG + rm /tmp/tmp.jBtaMksWAD /tmp/tmp.hood7lbTrG + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aqrccLkTGu +++ mktemp ++ local LAST_ERR=/tmp/tmp.htNUug0lMP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aqrccLkTGu ++ cat /tmp/tmp.htNUug0lMP ++ rm /tmp/tmp.aqrccLkTGu /tmp/tmp.htNUug0lMP ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lSeylLo53t +++ mktemp ++ local LAST_ERR=/tmp/tmp.21QUSNla9J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lSeylLo53t ++ cat /tmp/tmp.21QUSNla9J ++ rm /tmp/tmp.lSeylLo53t /tmp/tmp.21QUSNla9J ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.xrKavsQRic ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.SMuxcW0hc9 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.xrKavsQRic +++++ cat /tmp/tmp.SMuxcW0hc9 +++++ rm /tmp/tmp.xrKavsQRic /tmp/tmp.SMuxcW0hc9 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ObodooIhz8 
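# annotation (added in editing, not part of the trace): get_proxy_engine/get_proxy above
# decide which proxy fronts the cluster by reading the custom resource spec via jsonpath.
# A minimal sketch of the same check, assuming the kubectl context and pxc CRD used in
# this log; the haproxy service name is an assumption, since only the proxysql branch
# is exercised in this run:
cluster=some-name
if [[ $(kubectl get pxc "$cluster" -o jsonpath='{.spec.haproxy.enabled}') == "true" ]]; then
    echo "$cluster-haproxy"    # assumed name; this branch is not taken in this log
elif [[ $(kubectl get pxc "$cluster" -o jsonpath='{.spec.proxysql.enabled}') == "true" ]]; then
    echo "$cluster-proxysql"   # the branch taken throughout this trace
fi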
++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ntpriw35Fn +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ObodooIhz8 +++++ cat /tmp/tmp.ntpriw35Fn +++++ rm /tmp/tmp.ObodooIhz8 /tmp/tmp.ntpriw35Fn +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IhZPwSgk1Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.For2RmCcBI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IhZPwSgk1Y ++ cat /tmp/tmp.For2RmCcBI ++ rm /tmp/tmp.IhZPwSgk1Y /tmp/tmp.For2RmCcBI ++ return 0 + [[ 3 == \3 ]] + echo + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.j16xIRrORK ++ mktemp + local LAST_ERR=/tmp/tmp.8k84OPTkrB + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.j16xIRrORK secret/my-cluster-secrets patched + cat /tmp/tmp.8k84OPTkrB + rm /tmp/tmp.j16xIRrORK /tmp/tmp.8k84OPTkrB + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fqen9elFHZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.DAe2DoXybJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fqen9elFHZ ++ cat /tmp/tmp.DAe2DoXybJ ++ rm /tmp/tmp.fqen9elFHZ /tmp/tmp.DAe2DoXybJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QVpNeTEYJo +++ mktemp ++ local LAST_ERR=/tmp/tmp.4pFgAgOavz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QVpNeTEYJo ++ cat /tmp/tmp.4pFgAgOavz ++ rm /tmp/tmp.QVpNeTEYJo /tmp/tmp.4pFgAgOavz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
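# annotation (added in editing, not part of the trace): wait_cluster_consistency polls the
# CR status until .status.state reports "ready", then compares the pxc and proxysql ready
# counts against the expected sizes. A condensed sketch of the polling loop, assuming the
# same jsonpath field and the 300-iteration / 5-second budget visible in this trace:
for i in $(seq 0 300); do
    state=$(kubectl get pxc some-name -o jsonpath='{.status.state}')
    [[ $state == "ready" ]] && break   # the ready-count checks follow after the loop
    echo -n .
    sleep 5
done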
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PdqXjzrgI7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.3JcBXKOkpA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PdqXjzrgI7 ++ cat /tmp/tmp.3JcBXKOkpA ++ rm /tmp/tmp.PdqXjzrgI7 /tmp/tmp.3JcBXKOkpA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AQfqyLN1iV +++ mktemp ++ local LAST_ERR=/tmp/tmp.o2GJFinEI3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AQfqyLN1iV ++ cat /tmp/tmp.o2GJFinEI3 ++ rm /tmp/tmp.AQfqyLN1iV /tmp/tmp.o2GJFinEI3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BUVE3nFtL6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.VpTtYUDOXV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BUVE3nFtL6 ++ cat /tmp/tmp.VpTtYUDOXV ++ rm /tmp/tmp.BUVE3nFtL6 /tmp/tmp.VpTtYUDOXV ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jOttEX2nMj +++ mktemp ++ local LAST_ERR=/tmp/tmp.yVtNJ1x90S ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jOttEX2nMj ++ cat /tmp/tmp.yVtNJ1x90S ++ rm /tmp/tmp.jOttEX2nMj /tmp/tmp.yVtNJ1x90S ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.yGUCyONFob ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.InuRuNL8x6 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.yGUCyONFob +++++ cat /tmp/tmp.InuRuNL8x6 +++++ rm /tmp/tmp.yGUCyONFob /tmp/tmp.InuRuNL8x6 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.PSoDIAbOsM ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.fidPaGBguO +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.PSoDIAbOsM +++++ cat /tmp/tmp.fidPaGBguO +++++ rm /tmp/tmp.PSoDIAbOsM /tmp/tmp.fidPaGBguO +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local 
LAST_OUT=/tmp/tmp.L0vc9bVVTb +++ mktemp ++ local LAST_ERR=/tmp/tmp.4E1BwhCHCB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.L0vc9bVVTb ++ cat /tmp/tmp.4E1BwhCHCB ++ rm /tmp/tmp.L0vc9bVVTb /tmp/tmp.4E1BwhCHCB ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.tJZuaBgojY/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-2.sql /tmp/tmp.tJZuaBgojY/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.tJZuaBgojY/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-2.sql /tmp/tmp.tJZuaBgojY/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.tJZuaBgojY/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-2.sql /tmp/tmp.tJZuaBgojY/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.lbusm4GxkL ++ mktemp + local LAST_ERR=/tmp/tmp.YmwJE06SzG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lbusm4GxkL perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.YmwJE06SzG + rm /tmp/tmp.lbusm4GxkL /tmp/tmp.YmwJE06SzG + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.nEbc19aOdB ++ mktemp + local LAST_ERR=/tmp/tmp.h71fs6ehJ0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.nEbc19aOdB secret/my-cluster-secrets patched + cat /tmp/tmp.h71fs6ehJ0 + rm /tmp/tmp.nEbc19aOdB /tmp/tmp.h71fs6ehJ0 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9kb8jlOzI3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.d8G8YpiHiZ ++ local 
exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9kb8jlOzI3 ++ cat /tmp/tmp.d8G8YpiHiZ ++ rm /tmp/tmp.9kb8jlOzI3 /tmp/tmp.d8G8YpiHiZ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yqSNHVxk69 +++ mktemp ++ local LAST_ERR=/tmp/tmp.iClgHZwtVn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yqSNHVxk69 ++ cat /tmp/tmp.iClgHZwtVn ++ rm /tmp/tmp.yqSNHVxk69 /tmp/tmp.iClgHZwtVn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EbYX6ofwIW +++ mktemp ++ local LAST_ERR=/tmp/tmp.Cx3UCYjV5l ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EbYX6ofwIW ++ cat /tmp/tmp.Cx3UCYjV5l ++ rm /tmp/tmp.EbYX6ofwIW /tmp/tmp.Cx3UCYjV5l ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.G0iCJMgLhY +++ mktemp ++ local LAST_ERR=/tmp/tmp.YoLiOotKm8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.G0iCJMgLhY ++ cat /tmp/tmp.YoLiOotKm8 ++ rm /tmp/tmp.G0iCJMgLhY /tmp/tmp.YoLiOotKm8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0EKT4Ega8R +++ mktemp ++ local LAST_ERR=/tmp/tmp.7kNKmEKVXm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0EKT4Ega8R ++ cat /tmp/tmp.7kNKmEKVXm ++ rm /tmp/tmp.0EKT4Ega8R /tmp/tmp.7kNKmEKVXm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Z03uLN2MoK +++ mktemp ++ local LAST_ERR=/tmp/tmp.hDsbP4KavE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Z03uLN2MoK ++ cat /tmp/tmp.hDsbP4KavE ++ rm /tmp/tmp.Z03uLN2MoK /tmp/tmp.hDsbP4KavE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
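# annotation (added in editing, not part of the trace): every user test in this log rotates
# a password the same way: patch one key of the Secret with a base64 value and wait for the
# operator to reconcile (dGVzdC1wYXNzd29yZA== decodes to "test-password"). The recurring
# pattern, using the Secret and key names from the xtrabackup step above:
value=$(echo -n 'test-password' | base64)
kubectl patch secret my-cluster-secrets -p "{\"data\":{\"xtrabackup\": \"$value\"}}"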
.+ sleep 5 [... eighteen near-identical status polls elided: iterations 5 through 22 each slept 5 seconds, re-ran kubectl get pxc some-name -o 'jsonpath={.status.state}' through the kubectl_bin wrapper with fresh mktemp files, and saw "initializing" ...] + echo -n . 
.+ sleep 5 + [[ 23 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.66CTmG3J8u +++ mktemp ++ local LAST_ERR=/tmp/tmp.dSl7bYhTt0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.66CTmG3J8u ++ cat /tmp/tmp.dSl7bYhTt0 ++ rm /tmp/tmp.66CTmG3J8u /tmp/tmp.dSl7bYhTt0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 24 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TBgGcQg8ws +++ mktemp ++ local LAST_ERR=/tmp/tmp.f9KV14h6d8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TBgGcQg8ws ++ cat /tmp/tmp.f9KV14h6d8 ++ rm /tmp/tmp.TBgGcQg8ws /tmp/tmp.f9KV14h6d8 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8l5NSjoyq4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZCzCK0JfVc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8l5NSjoyq4 ++ cat /tmp/tmp.ZCzCK0JfVc ++ rm /tmp/tmp.8l5NSjoyq4 /tmp/tmp.ZCzCK0JfVc ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Ox8WioLTvc ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.URqIXcZjsE +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Ox8WioLTvc +++++ cat /tmp/tmp.URqIXcZjsE +++++ rm /tmp/tmp.Ox8WioLTvc /tmp/tmp.URqIXcZjsE +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.kUiuKLgIiK ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.7obt0IhbC3 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.kUiuKLgIiK +++++ cat /tmp/tmp.7obt0IhbC3 +++++ rm /tmp/tmp.kUiuKLgIiK /tmp/tmp.7obt0IhbC3 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.U53V6IoK8W +++ mktemp ++ local LAST_ERR=/tmp/tmp.UhDQmHiOhE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.U53V6IoK8W ++ cat /tmp/tmp.UhDQmHiOhE ++ rm /tmp/tmp.U53V6IoK8W /tmp/tmp.UhDQmHiOhE ++ return 0 + [[ 2 == \2 ]] + echo + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + 
local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-3-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' -s /tmp/tmp.tJZuaBgojY/select-3.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-3.sql /tmp/tmp.tJZuaBgojY/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.nqwmomWFp0 ++ mktemp + local LAST_ERR=/tmp/tmp.c5x9yVamJ5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.nqwmomWFp0 secret/my-cluster-secrets patched + cat /tmp/tmp.c5x9yVamJ5 + rm /tmp/tmp.nqwmomWFp0 /tmp/tmp.c5x9yVamJ5 + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.3Hpkcnwjlm +++ mktemp ++ local LAST_ERR=/tmp/tmp.1hGmg66vtQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3Hpkcnwjlm ++ cat /tmp/tmp.1hGmg66vtQ ++ rm /tmp/tmp.3Hpkcnwjlm /tmp/tmp.1hGmg66vtQ ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + retry=0 + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep additional_password ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cY6SnAh01L +++ mktemp ++ local LAST_ERR=/tmp/tmp.AJOx7ARAFV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client 
-o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cY6SnAh01L ++ cat /tmp/tmp.AJOx7ARAFV ++ rm /tmp/tmp.cY6SnAh01L /tmp/tmp.AJOx7ARAFV ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace {"additional_password": "$A$005$\\n%0v w\\u0014\\\\Y*\\u0001Gf\\u001er|/K,KKvHJVTpc9vCh3fNF4QF7rDgSUwcsWioYo5RCSf9Qk5"} + retry=0 + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.L0yvTKIcgz +++ mktemp ++ local LAST_ERR=/tmp/tmp.DTxgbMvFmH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.L0yvTKIcgz ++ cat /tmp/tmp.DTxgbMvFmH ++ rm /tmp/tmp.L0yvTKIcgz /tmp/tmp.DTxgbMvFmH ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 1 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VGbWQVk2cx +++ mktemp ++ local LAST_ERR=/tmp/tmp.g0J7PZIAXa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VGbWQVk2cx ++ cat /tmp/tmp.g0J7PZIAXa ++ rm /tmp/tmp.VGbWQVk2cx /tmp/tmp.g0J7PZIAXa ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local 
max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 2 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ro6r9Vz315 +++ mktemp ++ local LAST_ERR=/tmp/tmp.CskbmpjEPD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ro6r9Vz315 ++ cat /tmp/tmp.CskbmpjEPD ++ rm /tmp/tmp.Ro6r9Vz315 /tmp/tmp.CskbmpjEPD ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 3 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kn0Hbl0rFS +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZczXur8w6t ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kn0Hbl0rFS ++ cat /tmp/tmp.ZczXur8w6t ++ rm /tmp/tmp.kn0Hbl0rFS /tmp/tmp.ZczXur8w6t ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set 
+o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 [... near-identical one-second retries elided: iterations 4 through 10 each re-resolved the pxc-client pod and re-ran the SELECT User_attributes probe with grep NULL, with the old monitor password still present; the excerpt ends mid-iteration ...]
++ rm /tmp/tmp.bPUmJUFgVm /tmp/tmp.kSTZ60Q7ZC ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 11 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.z1kH89gOqR +++ mktemp ++ local LAST_ERR=/tmp/tmp.MX4PSHrg7z ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.z1kH89gOqR ++ cat /tmp/tmp.MX4PSHrg7z ++ rm /tmp/tmp.z1kH89gOqR /tmp/tmp.MX4PSHrg7z ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 12 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rCuVSBMsxh +++ mktemp ++ local LAST_ERR=/tmp/tmp.i5Go6DImOp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rCuVSBMsxh ++ cat /tmp/tmp.i5Go6DImOp ++ rm /tmp/tmp.rCuVSBMsxh /tmp/tmp.i5Go6DImOp ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + 
set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 13 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DVqvWMrQoV +++ mktemp ++ local LAST_ERR=/tmp/tmp.VAtBTjvIJp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DVqvWMrQoV ++ cat /tmp/tmp.VAtBTjvIJp ++ rm /tmp/tmp.DVqvWMrQoV /tmp/tmp.VAtBTjvIJp ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 14 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jAxe6hHqJb +++ mktemp ++ local LAST_ERR=/tmp/tmp.pqBLN5Hukw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jAxe6hHqJb ++ cat /tmp/tmp.pqBLN5Hukw ++ rm /tmp/tmp.jAxe6hHqJb /tmp/tmp.pqBLN5Hukw ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace NULL + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait 
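What the poll above is waiting for: when the operator rotates the monitor password it evidently keeps the old one active through MySQL 8.0's dual-password feature, and mysql.user.User_attributes holds the retained secondary password as JSON until ALTER USER ... DISCARD OLD PASSWORD runs, at which point the column reverts to NULL. grep NULL therefore succeeds only once the old password is fully retired. A minimal sketch of the loop driving this trace, assuming the suite's run_mysql helper; the 240-pass cap and 1-second sleep are taken from the trace, the rest is a plausible reconstruction, not the suite's exact source:

is_old_password_discarded() {
    local username=$1
    local uri=$2
    # User_attributes is non-NULL ({"additional_password": ...}) while the
    # old password is retained; it reverts to NULL after DISCARD OLD PASSWORD.
    run_mysql "SELECT User_attributes FROM mysql.user WHERE user='${username}'" "${uri}" \
        | grep NULL
}

retry=0
until is_old_password_discarded monitor "-h some-name-pxc -uroot -p'test-password'"; do
    echo 'waiting for password propagation'
    sleep 1
    let retry+=1
    if [[ $retry -ge 240 ]]; then
        echo 'old monitor password was never discarded' >&2
        exit 1
    fi
done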
+ desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready
[... kubectl_bin scratch-file bookkeeping (mktemp/LAST_OUT/LAST_ERR/cat/rm) collapsed; the checks and their results are kept ...]
++ kubectl get pxc some-name -o 'jsonpath={.status.state}' + [[ ready == \r\e\a\d\y ]] ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' + [[ 3 == \3 ]] +++ get_proxy_engine some-name ++++ get_proxy some-name +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ [[ '' == \t\r\u\e ]] +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4.sql + [[
perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hzyyilI6TT +++ mktemp ++ local LAST_ERR=/tmp/tmp.dBzBQBH0n5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hzyyilI6TT ++ cat /tmp/tmp.dBzBQBH0n5 ++ rm /tmp/tmp.hzyyilI6TT /tmp/tmp.dBzBQBH0n5 ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.tJZuaBgojY/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.tJZuaBgojY/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.tJZuaBgojY/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.TOhKNc4vyn ++ mktemp + local LAST_ERR=/tmp/tmp.GxrR9Uc0Rl + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TOhKNc4vyn secret/my-cluster-secrets patched + cat /tmp/tmp.GxrR9Uc0Rl + rm /tmp/tmp.TOhKNc4vyn /tmp/tmp.GxrR9Uc0Rl + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UFiYmY3tLW +++ mktemp ++ local LAST_ERR=/tmp/tmp.2ytwkrWWpY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ 
exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UFiYmY3tLW ++ cat /tmp/tmp.2ytwkrWWpY ++ rm /tmp/tmp.UFiYmY3tLW /tmp/tmp.2ytwkrWWpY ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BlZ7WwOjxG +++ mktemp ++ local LAST_ERR=/tmp/tmp.4Ky18EtcoR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BlZ7WwOjxG ++ cat /tmp/tmp.4Ky18EtcoR ++ rm /tmp/tmp.BlZ7WwOjxG /tmp/tmp.4Ky18EtcoR ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Lby9kZEMEa ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.tn5hD4wO13 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Lby9kZEMEa +++++ cat /tmp/tmp.tn5hD4wO13 +++++ rm /tmp/tmp.Lby9kZEMEa /tmp/tmp.tn5hD4wO13 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.w4mlVhNrVG ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.U9XKqlOlh7 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.w4mlVhNrVG +++++ cat /tmp/tmp.U9XKqlOlh7 +++++ rm /tmp/tmp.w4mlVhNrVG /tmp/tmp.U9XKqlOlh7 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6vAb8xeeUV +++ mktemp ++ local LAST_ERR=/tmp/tmp.VudeMmEMjE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6vAb8xeeUV ++ cat /tmp/tmp.VudeMmEMjE ++ rm /tmp/tmp.6vAb8xeeUV /tmp/tmp.VudeMmEMjE ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local 
LAST_OUT=/tmp/tmp.aYwRlETdiu +++ mktemp ++ local LAST_ERR=/tmp/tmp.NFkjsF3ssj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aYwRlETdiu ++ cat /tmp/tmp.NFkjsF3ssj ++ rm /tmp/tmp.aYwRlETdiu /tmp/tmp.NFkjsF3ssj ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.tJZuaBgojY/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.tJZuaBgojY/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.tJZuaBgojY/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.UIiouQavtE ++ mktemp + local LAST_ERR=/tmp/tmp.1NqUpLfSB2 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.UIiouQavtE perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.1NqUpLfSB2 + rm /tmp/tmp.UIiouQavtE /tmp/tmp.1NqUpLfSB2 + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7naxvyJluZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.k19mANkXve ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7naxvyJluZ ++ cat /tmp/tmp.k19mANkXve ++ rm /tmp/tmp.7naxvyJluZ /tmp/tmp.k19mANkXve ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SdsCFLMhm1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.shFiw59IOT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SdsCFLMhm1 ++ cat /tmp/tmp.shFiw59IOT ++ rm /tmp/tmp.SdsCFLMhm1 /tmp/tmp.shFiw59IOT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
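While the poll above spins, note what triggered it: changing spec.secretsName points the cluster at a different Secret, so the operator re-reads every system-user password from my-cluster-secrets-2 and restarts the pods, which is why .status.state drops to "initializing" for the next ~20 polls. The patch is reproducible standalone (object names as in this test):

# Same merge patch as in the trace; "some-name" and "my-cluster-secrets-2"
# are this test's object names.
kubectl patch pxc some-name --type merge \
    --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}'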
.+ sleep 5 [... iterations i=1 through 18 of the readiness poll collapsed: each pass prints one dot, sleeps 5 seconds and re-reads kubectl get pxc some-name -o 'jsonpath={.status.state}', which keeps returning "initializing" while the pods restart against the new Secret; only the /tmp/tmp.* scratch-file names differ between passes ...] + echo -n .
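The collapsed dots come from the suite's wait_cluster_consistency helper. Condensed to its core, the loop looks roughly like this, a sketch rather than the helper's exact source; the 300-pass cap and 5-second sleep appear in the trace, giving the cluster up to ~25 minutes to converge, and once the state is "ready" the helper additionally compares the pxc and proxysql ready counts (omitted here):

i=0
max=300
until [[ $(kubectl get pxc some-name -o 'jsonpath={.status.state}') == ready ]]; do
    echo -n .        # one dot per pass, as in the trace
    sleep 5
    if [[ $i -ge $max ]]; then
        echo 'cluster did not become ready in time' >&2
        exit 1
    fi
    let i+=1
done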
.+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UNkSsxMUiD +++ mktemp ++ local LAST_ERR=/tmp/tmp.mfaV2fbFQj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UNkSsxMUiD ++ cat /tmp/tmp.mfaV2fbFQj ++ rm /tmp/tmp.UNkSsxMUiD /tmp/tmp.mfaV2fbFQj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ROzygsWGNa +++ mktemp ++ local LAST_ERR=/tmp/tmp.cNqAjuiexO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ROzygsWGNa ++ cat /tmp/tmp.cNqAjuiexO ++ rm /tmp/tmp.ROzygsWGNa /tmp/tmp.cNqAjuiexO ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.W7RRA8D1A2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.mGV68o9sy0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.W7RRA8D1A2 ++ cat /tmp/tmp.mGV68o9sy0 ++ rm /tmp/tmp.W7RRA8D1A2 /tmp/tmp.mGV68o9sy0 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.SkgxsywzXJ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.10cHmRIogm +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.SkgxsywzXJ +++++ cat /tmp/tmp.10cHmRIogm +++++ rm /tmp/tmp.SkgxsywzXJ /tmp/tmp.10cHmRIogm +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.hO4VPA9i71 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Cu5iPgnMQA +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.hO4VPA9i71 +++++ cat /tmp/tmp.Cu5iPgnMQA +++++ rm /tmp/tmp.hO4VPA9i71 /tmp/tmp.Cu5iPgnMQA +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9vifvYqktw +++ mktemp ++ local LAST_ERR=/tmp/tmp.3vMqNh4jeb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9vifvYqktw ++ cat /tmp/tmp.3vMqNh4jeb ++ rm /tmp/tmp.9vifvYqktw /tmp/tmp.3vMqNh4jeb ++ return 0 + [[ 2 == \2 ]] + echo + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator 
----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.CMbfC0Nmls ++ mktemp + local LAST_ERR=/tmp/tmp.Bqfe9Cb6Xx + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.CMbfC0Nmls secret/my-cluster-secrets-2 patched + cat /tmp/tmp.Bqfe9Cb6Xx + rm /tmp/tmp.CMbfC0Nmls /tmp/tmp.Bqfe9Cb6Xx + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UAlbON0vfC +++ mktemp ++ local LAST_ERR=/tmp/tmp.P8vE2M35dH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UAlbON0vfC ++ cat /tmp/tmp.P8vE2M35dH ++ rm /tmp/tmp.UAlbON0vfC /tmp/tmp.P8vE2M35dH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
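The patch_secret helper seen above is a thin wrapper over a JSON merge patch against the Secret's data map; the value must be base64-encoded first, which is what the echo -n | base64 pair does (dGVzdC1wYXNzd29yZDI= is test-password2; despite the trace's variable name newpassencrypted, base64 is encoding, not encryption). A sketch, with the helper's shape inferred from its trace:

patch_secret() {
    local secret=$1
    local key=$2
    local value=$3   # must already be base64-encoded
    kubectl patch secret "$secret" -p="{\"data\":{\"$key\": \"$value\"}}"
}

newpass=test-password2
patch_secret my-cluster-secrets-2 operator "$(echo -n "$newpass" | base64)"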
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pye4WBa86M +++ mktemp ++ local LAST_ERR=/tmp/tmp.9maoJs9X5x ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pye4WBa86M ++ cat /tmp/tmp.9maoJs9X5x ++ rm /tmp/tmp.pye4WBa86M /tmp/tmp.9maoJs9X5x ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mTpWrXdDGH +++ mktemp ++ local LAST_ERR=/tmp/tmp.y4IKHM2JSo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mTpWrXdDGH ++ cat /tmp/tmp.y4IKHM2JSo ++ rm /tmp/tmp.mTpWrXdDGH /tmp/tmp.y4IKHM2JSo ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.8asLD6jZHE ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.DDmg0mY6QL +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.8asLD6jZHE +++++ cat /tmp/tmp.DDmg0mY6QL +++++ rm /tmp/tmp.8asLD6jZHE /tmp/tmp.DDmg0mY6QL +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.dyPh6SOAsO ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.QY3LlEPdq6 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.dyPh6SOAsO +++++ cat /tmp/tmp.QY3LlEPdq6 +++++ rm /tmp/tmp.dyPh6SOAsO /tmp/tmp.QY3LlEPdq6 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.z5fHyqmhjX +++ mktemp ++ local LAST_ERR=/tmp/tmp.mYW4ru5FW5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.z5fHyqmhjX ++ cat /tmp/tmp.mYW4ru5FW5 ++ rm /tmp/tmp.z5fHyqmhjX /tmp/tmp.mYW4ru5FW5 ++ return 0 + [[ 2 == \2 ]] + echo + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql ]] + 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Wz69p27Gqj +++ mktemp ++ local LAST_ERR=/tmp/tmp.xWN9DtnJtw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Wz69p27Gqj ++ cat /tmp/tmp.xWN9DtnJtw ++ rm /tmp/tmp.Wz69p27Gqj /tmp/tmp.xWN9DtnJtw ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.tJZuaBgojY/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.tJZuaBgojY/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.tJZuaBgojY/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.CKScW7k9fw +++ mktemp ++ local LAST_ERR=/tmp/tmp.JhBCHuwpnD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CKScW7k9fw ++ cat /tmp/tmp.JhBCHuwpnD ++ rm /tmp/tmp.CKScW7k9fw /tmp/tmp.JhBCHuwpnD ++ return 0 + newpass='V!%gXdjl&(aTe%xYYn' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''V!%gXdjl&(aTe%xYYn'\'';' '-h some-name-pxc -uroot -p'\''V!%gXdjl&(aTe%xYYn'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''V!%gXdjl&(aTe%xYYn'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''V!%gXdjl&(aTe%xYYn'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EMHvgZ6vtO +++ mktemp ++ local LAST_ERR=/tmp/tmp.z65szWzSvd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EMHvgZ6vtO ++ cat /tmp/tmp.z65szWzSvd ++ rm /tmp/tmp.EMHvgZ6vtO /tmp/tmp.z65szWzSvd ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' 
++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''V!%gXdjl&(aTe%xYYn'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''V!%gXdjl&(aTe%xYYn'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''V!%gXdjl&(aTe%xYYn'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''V!%gXdjl&(aTe%xYYn'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wHVrn2VRVl +++ mktemp ++ local LAST_ERR=/tmp/tmp.BeNKJFIRXl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wHVrn2VRVl ++ cat /tmp/tmp.BeNKJFIRXl ++ rm /tmp/tmp.wHVrn2VRVl /tmp/tmp.BeNKJFIRXl ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.tJZuaBgojY/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.tJZuaBgojY/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.tJZuaBgojY/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.2GxlMJ7ux8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.OEgPjYRH1q ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2GxlMJ7ux8 ++ cat /tmp/tmp.OEgPjYRH1q ++ rm /tmp/tmp.2GxlMJ7ux8 /tmp/tmp.OEgPjYRH1q ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.0ntDSYzrXx ++ mktemp + local LAST_ERR=/tmp/tmp.skU2gOOVyb + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0ntDSYzrXx secret/my-cluster-secrets-2 configured + cat /tmp/tmp.skU2gOOVyb Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
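The kubectl warning above is expected rather than a failure: my-cluster-secrets-2 had so far only been modified imperatively (kubectl patch), so it lacks the kubectl.kubernetes.io/last-applied-configuration annotation that kubectl apply uses for its three-way merge; apply patches the annotation in and proceeds. Creating such fixtures with --save-config avoids the warning, for example:

# Illustrative only; the real fixture lives under the Jenkins workspace
# path shown in the trace.
kubectl create --save-config -f e2e-tests/users/conf/secrets.yml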
+ rm /tmp/tmp.0ntDSYzrXx /tmp/tmp.skU2gOOVyb + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mI9pIslHii +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ytt49VIHK5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mI9pIslHii ++ cat /tmp/tmp.Ytt49VIHK5 ++ rm /tmp/tmp.mI9pIslHii /tmp/tmp.Ytt49VIHK5 ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.tJZuaBgojY/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.tJZuaBgojY/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.tJZuaBgojY/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/conf/some-name.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/conf/some-name.yml + local pvc_name= + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/conf/some-name.yml '' + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + local pvc_name= + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.5ZvnEjkqQR + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2076-ee1be0ba#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' ++ mktemp + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-9213~ + local LAST_ERR=/tmp/tmp.vEG2otGwsb + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5ZvnEjkqQR perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.vEG2otGwsb + rm /tmp/tmp.5ZvnEjkqQR /tmp/tmp.vEG2otGwsb + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7ivM483S2m +++ mktemp ++ local LAST_ERR=/tmp/tmp.H0aVnhY3Oo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7ivM483S2m ++ cat /tmp/tmp.H0aVnhY3Oo ++ rm /tmp/tmp.7ivM483S2m /tmp/tmp.H0aVnhY3Oo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
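apply_config above never edits the manifest on disk: cat_config streams it through a chain of sed substitutions that pin apiVersion and every image field to the artifacts built for this PR, then pipes the result into kubectl apply -f -. A trimmed sketch with two of the substitutions from the trace (the real pipeline rewrites the haproxy, proxysql, backup, pmm and logcollector images the same way):

cat e2e-tests/users/conf/some-name.yml \
    | sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
    | sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2076-ee1be0ba#' \
    | kubectl apply -f -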
.+ sleep 5 [the status poll repeats in this fashion for iterations i=0 through i=53: every 5 seconds the loop re-runs kubectl get pxc some-name -o 'jsonpath={.status.state}' through the kubectl_bin temp-file wrapper, sees "initializing", prints a dot, and increments the counter toward the 300-iteration cap; a minimal sketch of the loop follows]
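(For reference, the wait_cluster_consistency polling traced here reduces to the minimal sketch below. This is a simplified stand-in, not the harness code: it assumes kubectl already targets the test namespace, drops the kubectl_bin retry/temp-file wrapper that produces the mktemp noise in this trace, and leaves out the follow-up checks of .status.pxc.ready and .status.haproxy.ready that the trace performs once the state reaches "ready". The function name wait_for_pxc_ready is illustrative.)

    # Poll the PXC custom resource until .status.state reports "ready";
    # give up after max rounds of 5 seconds each.
    wait_for_pxc_ready() {
        local cluster=$1 i=0 max=300
        until [[ "$(kubectl get pxc "$cluster" -o 'jsonpath={.status.state}')" == "ready" ]]; do
            [[ $i -ge $max ]] && { echo "timeout waiting for pxc/$cluster" >&2; return 1; }
            echo -n .
            sleep 5
            i=$((i + 1))
        done
    }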
.+ sleep 5 + [[ 54 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QJnPzJaJq1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.DXaNwSvu8g ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QJnPzJaJq1 ++ cat /tmp/tmp.DXaNwSvu8g ++ rm /tmp/tmp.QJnPzJaJq1 /tmp/tmp.DXaNwSvu8g ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 55 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5iCPow4Zow +++ mktemp ++ local LAST_ERR=/tmp/tmp.aGywlQk6Dj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5iCPow4Zow ++ cat /tmp/tmp.aGywlQk6Dj ++ rm /tmp/tmp.5iCPow4Zow /tmp/tmp.aGywlQk6Dj ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hr5FDZHdWL +++ mktemp ++ local LAST_ERR=/tmp/tmp.UHHezdRhSD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hr5FDZHdWL ++ cat /tmp/tmp.UHHezdRhSD ++ rm /tmp/tmp.hr5FDZHdWL /tmp/tmp.UHHezdRhSD ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ylaKmuDMzL ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.xyJYR8ouvY +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ylaKmuDMzL +++++ cat /tmp/tmp.xyJYR8ouvY +++++ rm /tmp/tmp.ylaKmuDMzL /tmp/tmp.xyJYR8ouvY +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BvXbESo8BH +++ mktemp ++ local LAST_ERR=/tmp/tmp.dEkSNxHjas ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BvXbESo8BH ++ cat /tmp/tmp.dEkSNxHjas ++ rm /tmp/tmp.BvXbESo8BH /tmp/tmp.dEkSNxHjas ++ return 0 + [[ 3 == \3 ]] + echo + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MIUPi6rNk2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.rb1aeXTiXj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MIUPi6rNk2 ++ cat /tmp/tmp.rb1aeXTiXj ++ rm /tmp/tmp.MIUPi6rNk2 /tmp/tmp.rb1aeXTiXj ++ return 0 + 
current_generation=1 + [[ 1 != \1 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.1nLbuTT68P ++ mktemp + local LAST_ERR=/tmp/tmp.67MgliON5x + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1nLbuTT68P secret/my-cluster-secrets patched + cat /tmp/tmp.67MgliON5x + rm /tmp/tmp.1nLbuTT68P /tmp/tmp.67MgliON5x + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PZDALkeVCG +++ mktemp ++ local LAST_ERR=/tmp/tmp.HNliZ4JUQO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PZDALkeVCG ++ cat /tmp/tmp.HNliZ4JUQO ++ rm /tmp/tmp.PZDALkeVCG /tmp/tmp.HNliZ4JUQO ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Plh4Z4PgZt +++ mktemp ++ local LAST_ERR=/tmp/tmp.HOOXhBuBT5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Plh4Z4PgZt ++ cat /tmp/tmp.HOOXhBuBT5 ++ rm /tmp/tmp.Plh4Z4PgZt /tmp/tmp.HOOXhBuBT5 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.JYTLGmDt78 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.fEGHJYroEJ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.JYTLGmDt78 +++++ cat /tmp/tmp.fEGHJYroEJ +++++ rm /tmp/tmp.JYTLGmDt78 /tmp/tmp.fEGHJYroEJ +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.25BAXc8KjL +++ mktemp ++ local LAST_ERR=/tmp/tmp.GLTn765kNr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.25BAXc8KjL ++ cat /tmp/tmp.GLTn765kNr ++ rm /tmp/tmp.25BAXc8KjL /tmp/tmp.GLTn765kNr ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy 
-umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-3-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7nDDvhVPGj +++ mktemp ++ local LAST_ERR=/tmp/tmp.3GNgMLLu8Y ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7nDDvhVPGj ++ cat /tmp/tmp.3GNgMLLu8Y ++ rm /tmp/tmp.7nDDvhVPGj /tmp/tmp.3GNgMLLu8Y ++ return 0 + client_pod=pxc-client-59944c5bbf-s6vm8 + wait_pod pxc-client-59944c5bbf-s6vm8 + local pod=pxc-client-59944c5bbf-s6vm8 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-s6vm8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-s6vm8 condition met waiting for pod/pxc-client-59944c5bbf-s6vm8 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.tJZuaBgojY/select-3.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.tJZuaBgojY/select-3.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2076/e2e-tests/users/compare/select-3.sql /tmp/tmp.tJZuaBgojY/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8ApAPMMgd6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.i3b5PmgOg4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8ApAPMMgd6 ++ cat /tmp/tmp.i3b5PmgOg4 ++ rm /tmp/tmp.8ApAPMMgd6 /tmp/tmp.i3b5PmgOg4 ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + destroy users-9213 + local namespace=users-9213 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' + grep -v 'get backup status: Job.batch' + sort -u ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + tee /tmp/tmp.tJZuaBgojY/operator.log +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.0w42uMA91H +++ mktemp ++ local LAST_ERR=/tmp/tmp.YSSEoTAnwd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0w42uMA91H ++ cat /tmp/tmp.YSSEoTAnwd ++ rm /tmp/tmp.0w42uMA91H /tmp/tmp.YSSEoTAnwd ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-84658c6c47-s6jzj ++ mktemp + local LAST_OUT=/tmp/tmp.MEFdvwrS1o ++ mktemp + local LAST_ERR=/tmp/tmp.wX8NSWG6lV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-84658c6c47-s6jzj + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.MEFdvwrS1o + cat /tmp/tmp.wX8NSWG6lV + rm /tmp/tmp.MEFdvwrS1o /tmp/tmp.wX8NSWG6lV + return 0 [truncated go-cmp object diff from the filtered operator log elided; a sketch of the collection pipeline follows]
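(The destroy path above pipes kubectl logs through a grep/sed/sort/tee chain before the operator log is printed. Below is a minimal sketch of that collection pipeline, assuming the pod label and filter expressions shown in the trace and again dropping the kubectl_bin temp-file wrapper; collect_operator_log and the /tmp/operator.log path are illustrative names.)

    # Resolve the operator pod by label, dump its log, drop routine noise,
    # strip volatile "ts" fields, de-duplicate, and keep a copy on disk.
    collect_operator_log() {
        local ns=pxc-operator out=/tmp/operator.log pod
        pod=$(kubectl get pods -n "$ns" \
            --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator \
            -o 'jsonpath={.items[].metadata.name}')
        kubectl logs -n "$ns" "$pod" \
            | grep -v level=info \
            | grep -v 'the object has been modified' \
            | grep -v 'get backup status: Job.batch' \
            | /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
            | sort -u \
            | tee "$out"
    }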
2025-12-04T13:21:19.440Z INFO setup Manager starting up {"gitCommit": "ee1be0bad2174aa25222aa09ad9903b8a418344c", "gitBranch": "PR-2076-ee1be0ba", "buildTime": "2025-12-04T10:59:38Z", "goVersion": "go1.25.5", "os": "linux", "arch": "amd64"} 2025-12-04T13:21:19.440Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.14-gke.1033000"} 2025-12-04T13:21:19.443Z INFO setup Registering Components. 2025-12-04T13:21:20.420Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-12-04T13:21:20.420Z INFO setup Starting the Cmd. 2025-12-04T13:21:20.421Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-12-04T13:21:20.421Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-12-04T13:21:20.421Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-12-04T13:21:20.421Z INFO controller-runtime.metrics Starting metrics server 2025-12-04T13:21:20.421Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-12-04T13:21:20.421Z INFO controller-runtime.webhook Starting webhook server 2025-12-04T13:21:20.421Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-12-04T13:21:20.522Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 2025-12-04T13:21:20.550Z DEBUG events percona-xtradb-cluster-operator-84658c6c47-s6jzj_dc274de7-7550-4f05-9871-af66e09f0084 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"15a9197d-39de-4e3c-8d9f-78780d182e12","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1764854480544703009"}, "reason": "LeaderElection"} 2025-12-04T13:21:20.550Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2025-12-04T13:21:20.551Z INFO Starting EventSource {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-12-04T13:21:20.551Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-12-04T13:21:20.551Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.Secret"} 2025-12-04T13:21:20.551Z INFO Starting EventSource {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-12-04T13:21:20.652Z INFO Starting Controller {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup"} 2025-12-04T13:21:20.652Z INFO Starting Controller {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster"} 2025-12-04T13:21:20.652Z INFO Starting Controller {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind":
"PerconaXtraDBClusterRestore"} 2025-12-04T13:21:20.652Z INFO Starting workers {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "worker count": 1} 2025-12-04T13:21:20.652Z INFO Starting workers {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "worker count": 1} 2025-12-04T13:21:20.652Z INFO Starting workers {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "worker count": 1} 2025-12-04T13:21:57.199Z INFO Set CR version {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "1c84403e-ec4c-42f9-8476-d3628d00b8f0", "version": "1.19.0"} 2025-12-04T13:21:57.322Z INFO User secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "1c84403e-ec4c-42f9-8476-d3628d00b8f0", "secrets": "my-cluster-secrets"} 2025-12-04T13:21:57.537Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "1c84403e-ec4c-42f9-8476-d3628d00b8f0", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-12-04T13:21:57.658Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "1c84403e-ec4c-42f9-8476-d3628d00b8f0", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-12-04T13:21:57.698Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "1c84403e-ec4c-42f9-8476-d3628d00b8f0", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-12-04T13:21:57.777Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "1c84403e-ec4c-42f9-8476-d3628d00b8f0", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-04T13:21:57.839Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "1c84403e-ec4c-42f9-8476-d3628d00b8f0", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-04T13:21:57.867Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": 
"PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "1c84403e-ec4c-42f9-8476-d3628d00b8f0", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-04T13:21:58.025Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "1c84403e-ec4c-42f9-8476-d3628d00b8f0", "object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-04T13:21:58.420Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "06d7e847-eb59-47eb-90b9-6d918af62690", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-12-04T13:21:58.443Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "06d7e847-eb59-47eb-90b9-6d918af62690", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-12-04T13:23:14.613Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "39d2b8cc-eae6-4e9d-9a47-ce3ce1c0ecef", "user": "operator"} 2025-12-04T13:23:14.646Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "39d2b8cc-eae6-4e9d-9a47-ce3ce1c0ecef", "user": "monitor"} 2025-12-04T13:23:14.689Z INFO User monitor: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "39d2b8cc-eae6-4e9d-9a47-ce3ce1c0ecef"} 2025-12-04T13:23:14.722Z INFO monitor user privileges granted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "39d2b8cc-eae6-4e9d-9a47-ce3ce1c0ecef"} 2025-12-04T13:23:14.751Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "39d2b8cc-eae6-4e9d-9a47-ce3ce1c0ecef", "user": "xtrabackup"} 2025-12-04T13:23:14.787Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": 
"PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "39d2b8cc-eae6-4e9d-9a47-ce3ce1c0ecef"} 2025-12-04T13:23:14.815Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "39d2b8cc-eae6-4e9d-9a47-ce3ce1c0ecef", "user": "replication"} 2025-12-04T13:23:14.824Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "39d2b8cc-eae6-4e9d-9a47-ce3ce1c0ecef", "err": "get primary pxc pod: not found"} 2025-12-04T13:23:19.577Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "1cca22c1-954e-4db1-91df-22af211f7181", "err": "get primary pxc pod: not found"} 2025-12-04T13:23:24.707Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "f6ea3e4d-282a-4ac3-8886-cc2dbdef760e", "err": "get primary pxc pod: not found"} 2025-12-04T13:23:29.839Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "05c99ba8-4638-49b3-8ed2-42663e7c2418", "err": "get primary pxc pod: not found"} 2025-12-04T13:25:40.289Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "935957cb-625d-40c2-9430-ea352074de03", "user": "root"} 2025-12-04T13:25:40.410Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "935957cb-625d-40c2-9430-ea352074de03", "new version": "8.0.43-34.1"} 2025-12-04T13:25:41.936Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "935957cb-625d-40c2-9430-ea352074de03"} 2025-12-04T13:25:47.415Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": 
"5374243f-c6c3-4a73-aff0-4f119401cf10"} 2025-12-04T13:25:52.313Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "595305c4-b43b-498c-8e91-b2e4d6b32207"} 2025-12-04T13:25:57.621Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "eb255bd0-a9ef-4afb-be09-7e01133051a3"} 2025-12-04T13:26:03.605Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "b03f3c97-5d7c-4867-b315-3790c1d8c03d"} 2025-12-04T13:26:08.734Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e8269df6-d4c4-41a6-baa6-266b17a224ab"} 2025-12-04T13:26:14.020Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "c3897514-9f45-43de-a1c5-b897f4892921"} 2025-12-04T13:26:19.290Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "8b02dffd-18b1-4594-9c0d-5b7d8bf0fa16"} 2025-12-04T13:26:24.714Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "53ae1341-bdc6-42d9-b72d-adf7d75422ba"} 2025-12-04T13:26:29.918Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "0ee10fa4-cd52-4f25-b4a1-48d257699909"} 2025-12-04T13:26:35.294Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "d09f5d83-f0d0-4467-8880-313f665bcd6c"} 2025-12-04T13:26:40.523Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9f0a084a-c367-4715-82bd-daa3fc132ce4"} 2025-12-04T13:26:46.044Z 
DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "b8915b6f-4893-4a1e-9c6e-81f087b749ad"} 2025-12-04T13:26:50.838Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "27079115-cdfe-4b4d-9c64-e53bf153833f"} 2025-12-04T13:26:56.419Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "4d86b878-f4ff-4df2-8542-dfcfbcadc22c"} 2025-12-04T13:27:01.584Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "57815a4b-63e3-4ca8-a061-9a37e104fbd3"} 2025-12-04T13:27:06.715Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "2a42b04e-39ae-4494-a569-495bafc088d1"} 2025-12-04T13:27:09.975Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "0d7ed672-2ca3-425e-876a-a51b07975c5e", "user": "root"} 2025-12-04T13:27:09.999Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "0d7ed672-2ca3-425e-876a-a51b07975c5e", "user": "root"} 2025-12-04T13:27:10.042Z INFO MySQL init secret created {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "0d7ed672-2ca3-425e-876a-a51b07975c5e", "secret": "some-name-mysql-init", "user": "root"} 2025-12-04T13:27:12.321Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "0d7ed672-2ca3-425e-876a-a51b07975c5e"} 2025-12-04T13:27:12.365Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "0d7ed672-2ca3-425e-876a-a51b07975c5e", "user": "root"} 
2025-12-04T13:27:12.386Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "0d7ed672-2ca3-425e-876a-a51b07975c5e", "user": "root"} 2025-12-04T13:27:14.096Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "0d7ed672-2ca3-425e-876a-a51b07975c5e"} 2025-12-04T13:27:19.631Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "1ff97101-701f-46fa-b59a-02bc09d9e64e"} 2025-12-04T13:27:24.730Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "2270fbea-8492-479b-8e14-ad22e540338c"} 2025-12-04T13:27:28.724Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "ee946496-ad5c-456d-a518-cce66eae13da", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-04T13:27:28.794Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "ee946496-ad5c-456d-a518-cce66eae13da", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-04T13:27:30.111Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "60ba9e6f-1b30-47f9-820d-5f1dc205693a"} 2025-12-04T13:27:53.194Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "8d44f688-8dd2-4fd4-b522-57fe5d3a23bb", "user": "proxyadmin"} 2025-12-04T13:27:53.194Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "8d44f688-8dd2-4fd4-b522-57fe5d3a23bb", "user": "proxyadmin"} 2025-12-04T13:27:53.223Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", 
"controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "8d44f688-8dd2-4fd4-b522-57fe5d3a23bb", "user": "proxyadmin"} 2025-12-04T13:27:53.246Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "8d44f688-8dd2-4fd4-b522-57fe5d3a23bb", "user": "proxyadmin"} 2025-12-04T13:27:53.246Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "8d44f688-8dd2-4fd4-b522-57fe5d3a23bb", "last-applied-secret": "0e22d3e3ba01dd0f6831b84d96f6d47709033092925d0046a00948e30237344e"} 2025-12-04T13:27:53.253Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "8d44f688-8dd2-4fd4-b522-57fe5d3a23bb", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-04T13:27:53.638Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "1717180a-c3a6-477b-80d5-f39f85c19945", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-04T13:28:11.009Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "510c5a93-4df2-4c7a-827b-793ef1def805", "err": "get primary pxc pod: not found"} 2025-12-04T13:28:35.623Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "006cc171-3982-4b9f-8c45-f7070d010931"} 2025-12-04T13:28:41.036Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "2fb772d2-c85f-4e3f-a59f-4d3e2ce7f357"} 2025-12-04T13:28:42.451Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "373cbe44-c3e6-4bb2-baf5-f8407f57807b", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-04T13:28:42.499Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "373cbe44-c3e6-4bb2-baf5-f8407f57807b", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-04T13:28:44.444Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "373cbe44-c3e6-4bb2-baf5-f8407f57807b"} 2025-12-04T13:28:49.411Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "7a9b358a-6d3b-46bc-a036-db0514884408", "user": "xtrabackup"} 2025-12-04T13:28:49.425Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", 
"PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "7a9b358a-6d3b-46bc-a036-db0514884408", "user": "xtrabackup"} 2025-12-04T13:28:49.443Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "7a9b358a-6d3b-46bc-a036-db0514884408", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-12-04T13:28:49.460Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "7a9b358a-6d3b-46bc-a036-db0514884408", "user": "xtrabackup"} 2025-12-04T13:28:49.470Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "7a9b358a-6d3b-46bc-a036-db0514884408", "user": "xtrabackup"} 2025-12-04T13:28:49.478Z INFO PXC pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "7a9b358a-6d3b-46bc-a036-db0514884408", "last-applied-secret": "33b72f9a984ad44ecf5b9c6b43b47a91a83ef9121a8bc72979eea0ca046dbc33"} 2025-12-04T13:28:49.481Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "7a9b358a-6d3b-46bc-a036-db0514884408", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-04T13:28:51.978Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "7a9b358a-6d3b-46bc-a036-db0514884408"} 2025-12-04T13:29:37.619Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "1be86e4c-9582-4fe7-838e-ae6f450a6785", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-9213 on 34.118.224.10:53: no such host"} 2025-12-04T13:29:48.257Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "ced3b641-c6b0-40f1-8445-4fd370f85501", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: invalid connection"} 
2025-12-04T13:30:34.926Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "c4cb3a9b-2590-418a-b90d-868f241c5393", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.1.82.41:33062: connect: connection refused"} 2025-12-04T13:30:40.149Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "73d88097-9a13-4d75-b18e-069b28874d5b", "primary name": "some-name-pxc-0.some-name-pxc.users-9213.svc.cluster.local"} 2025-12-04T13:30:45.277Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "86ffc3fb-c98e-442a-85b4-9ef2a2fc49c3", "primary name": "some-name-pxc-0.some-name-pxc.users-9213.svc.cluster.local"} 2025-12-04T13:30:50.415Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "ae2bed51-8cef-4a62-8848-90d5975243c0", "primary name": "some-name-pxc-0.some-name-pxc.users-9213.svc.cluster.local"} 2025-12-04T13:30:55.532Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "079c97e1-a3a8-48eb-96e1-1afb154b6124", "primary name": "some-name-pxc-0.some-name-pxc.users-9213.svc.cluster.local"} 2025-12-04T13:31:00.653Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "8b23fe4d-0c87-42e0-a7d2-9dd6e428dc06", "primary name": "some-name-pxc-0.some-name-pxc.users-9213.svc.cluster.local"} 2025-12-04T13:31:05.768Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "bb027ce2-c043-4b01-9a52-137a6ef80b43", "primary name": "some-name-pxc-0.some-name-pxc.users-9213.svc.cluster.local"} 2025-12-04T13:31:10.880Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "de8cb9bb-2750-4ff9-bf56-1db52e74f46f", "primary name": "some-name-pxc-0.some-name-pxc.users-9213.svc.cluster.local"} 2025-12-04T13:31:19.644Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "1c72d4e7-296c-4df3-b4b8-064f457928c9"} 2025-12-04T13:31:23.726Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "37404fe3-862f-4633-8b5c-a4fa69f5d318"} 2025-12-04T13:31:24.380Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "af5ae942-818c-4748-a81b-df56c5678a35", "user": "monitor"} 2025-12-04T13:31:24.394Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "af5ae942-818c-4748-a81b-df56c5678a35", "user": "monitor"} 2025-12-04T13:31:24.413Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "af5ae942-818c-4748-a81b-df56c5678a35", "secret": "some-name-mysql-init", "user": "monitor"} 2025-12-04T13:31:24.431Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "af5ae942-818c-4748-a81b-df56c5678a35", "user": "monitor"} 2025-12-04T13:31:24.487Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "af5ae942-818c-4748-a81b-df56c5678a35", "user": "monitor"} 2025-12-04T13:31:24.798Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "af5ae942-818c-4748-a81b-df56c5678a35", "last-applied-secret": "a0ff34e1cb9ef72733a47c6f82a4c60f627a13ef7b76d2563ba830b2f0692f67"} 2025-12-04T13:31:24.805Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "af5ae942-818c-4748-a81b-df56c5678a35", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-04T13:31:27.079Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "af5ae942-818c-4748-a81b-df56c5678a35", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-04T13:31:57.097Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "1d5cb253-d45a-40f6-82cf-9a73d0b980e3", "user": "monitor"} 2025-12-04T13:31:59.043Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "1d5cb253-d45a-40f6-82cf-9a73d0b980e3"} 2025-12-04T13:32:02.082Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "5f621395-fe1f-4ae8-bf56-3d318edf3989", "user": "monitor"} 2025-12-04T13:32:04.243Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "5f621395-fe1f-4ae8-bf56-3d318edf3989"} 2025-12-04T13:32:07.630Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "48c1f182-e3f3-48f2-b5ad-160473d6ae17", "user": "monitor"} 2025-12-04T13:32:09.727Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": 
"48c1f182-e3f3-48f2-b5ad-160473d6ae17"} 2025-12-04T13:32:13.175Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "95af408d-caf3-4cf7-9467-625dbd23dcc8", "user": "monitor"} 2025-12-04T13:32:15.424Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "95af408d-caf3-4cf7-9467-625dbd23dcc8"} 2025-12-04T13:32:18.713Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "cba54dd1-e64a-4b16-8ae7-9a434df75dca", "user": "monitor"} 2025-12-04T13:32:20.866Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "cba54dd1-e64a-4b16-8ae7-9a434df75dca"} 2025-12-04T13:32:24.257Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "33448d48-6f2e-409f-9633-947269db547f", "user": "monitor"} 2025-12-04T13:32:24.922Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "33448d48-6f2e-409f-9633-947269db547f", "user": "monitor"} 2025-12-04T13:32:24.937Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "33448d48-6f2e-409f-9633-947269db547f", "last-applied-secret": "a0ff34e1cb9ef72733a47c6f82a4c60f627a13ef7b76d2563ba830b2f0692f67"} 2025-12-04T13:32:26.867Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "33448d48-6f2e-409f-9633-947269db547f"} 2025-12-04T13:32:32.149Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "f444cc8a-7d54-4b88-9725-bb311d9c1cef"} 2025-12-04T13:32:37.337Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", 
"PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "af59b2e7-a778-4056-81a2-a27b84ec2e07"} 2025-12-04T13:32:42.649Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "c4003536-0587-4954-bd8f-485d06a271a0"} 2025-12-04T13:32:48.716Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "096f6702-1c68-4228-b89f-9f970108011a"} 2025-12-04T13:32:51.552Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e4371021-1e6d-41b1-896f-1fc97a14dc81", "user": "operator"} 2025-12-04T13:32:51.568Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e4371021-1e6d-41b1-896f-1fc97a14dc81", "user": "operator"} 2025-12-04T13:32:51.590Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e4371021-1e6d-41b1-896f-1fc97a14dc81", "secret": "some-name-mysql-init", "user": "operator"} 2025-12-04T13:32:51.607Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e4371021-1e6d-41b1-896f-1fc97a14dc81", "user": "operator"} 2025-12-04T13:32:51.622Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e4371021-1e6d-41b1-896f-1fc97a14dc81", "user": "operator"} 2025-12-04T13:32:51.648Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e4371021-1e6d-41b1-896f-1fc97a14dc81", "last-applied-secret": "365e89da8abbbd3de455f71f11c46fa17a286d73cad77ce9c8205e9a523796d1"} 2025-12-04T13:32:51.653Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e4371021-1e6d-41b1-896f-1fc97a14dc81", "object": "some-name-proxysql", "kind": 
"&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-04T13:32:54.779Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e4371021-1e6d-41b1-896f-1fc97a14dc81", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR (line:572) : Could not find a 
primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-04T13:33:12.410Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "dfac23cb-5d65-475f-99e2-9e308046c9fe"} 2025-12-04T13:33:16.732Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "b896c524-5edb-439b-bab3-35ceea905e88"} 2025-12-04T13:33:22.192Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9d9e452f-04af-4a7b-8de6-4401d9d959a3"} 
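
The ERROR 1045 bursts for 'operator' above (and earlier for 'proxyadmin') come from syncusers running while the ProxySQL pods are restarting and still hold the previous credentials; note the client hostname is truncated in the server's error text ("...users-9213.svc.c"). The "PXC users synced with ProxySQL" entries that follow show the sync recovering once the restart completes. To check by hand that the rotated proxyadmin password matches what ProxySQL accepts, one could query its admin interface; the internal-some-name secret name follows the operator's usual internal-<cluster> convention and, like the presence of the mysql client in the proxysql container, is an assumption here:

  # read the operator's current copy of the proxyadmin password
  PROXY_ADMIN_PASS=$(kubectl -n users-9213 get secret internal-some-name \
    -o jsonpath='{.data.proxyadmin}' | base64 -d)
  # connect to the ProxySQL admin interface (6032) with it
  kubectl -n users-9213 exec some-name-proxysql-0 -c proxysql -- \
    mysql -h127.0.0.1 -P6032 -uproxyadmin -p"$PROXY_ADMIN_PASS" \
    -e "SELECT hostgroup_id, hostname, status FROM runtime_mysql_servers;"
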
2025-12-04T13:33:27.123Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "71214ae0-68ee-4d83-9549-95a9c51aec53"} 2025-12-04T13:33:29.640Z INFO Created user secrets {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "secrets": "my-cluster-secrets-2"} 2025-12-04T13:33:29.647Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "user": "root"} 2025-12-04T13:33:29.671Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "user": "root"} 2025-12-04T13:33:29.689Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "secret": "some-name-mysql-init", "user": "root"} 2025-12-04T13:33:31.819Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992"} 2025-12-04T13:33:31.846Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "user": "root"} 2025-12-04T13:33:31.867Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "user": "root"} 2025-12-04T13:33:31.874Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "user": "operator"} 2025-12-04T13:33:31.891Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", 
"reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "user": "operator"} 2025-12-04T13:33:31.919Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "secret": "some-name-mysql-init", "user": "operator"} 2025-12-04T13:33:31.938Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "user": "operator"} 2025-12-04T13:33:31.952Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "user": "operator"} 2025-12-04T13:33:31.960Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "user": "monitor"} 2025-12-04T13:33:31.975Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "user": "monitor"} 2025-12-04T13:33:31.994Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "secret": "some-name-mysql-init", "user": "monitor"} 2025-12-04T13:33:32.012Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "user": "monitor"} 2025-12-04T13:33:32.031Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "user": "monitor"} 2025-12-04T13:33:32.579Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "user": "xtrabackup"} 2025-12-04T13:33:32.593Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": 
"PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "user": "xtrabackup"} 2025-12-04T13:33:32.613Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-12-04T13:33:32.636Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "user": "xtrabackup"} 2025-12-04T13:33:32.647Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "user": "xtrabackup"} 2025-12-04T13:33:32.655Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "user": "replication"} 2025-12-04T13:33:32.669Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "user": "replication"} 2025-12-04T13:33:32.684Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "secret": "some-name-mysql-init", "user": "replication"} 2025-12-04T13:33:32.706Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "user": "replication"} 2025-12-04T13:33:32.719Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "user": "replication"} 2025-12-04T13:33:32.719Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", 
"user": "proxyadmin"} 2025-12-04T13:33:32.737Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "user": "proxyadmin"} 2025-12-04T13:33:32.757Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "user": "proxyadmin"} 2025-12-04T13:33:32.757Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "last-applied-secret": "f63160b4b412887ede0b7b4f08949b3b0dc61854b9bef21d65cb544757499d39"} 2025-12-04T13:33:32.757Z INFO PXC pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "last-applied-secret": "f63160b4b412887ede0b7b4f08949b3b0dc61854b9bef21d65cb544757499d39"} 2025-12-04T13:33:32.760Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-04T13:33:32.818Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-04T13:33:34.691Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "9125b72f-3c05-4668-80d3-4d3502883992", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-04T13:33:49.843Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "3dfee3cf-f9a3-432d-a8b7-395a762dbeed", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-2: dial tcp: lookup some-name-pxc-2.some-name-pxc.users-9213 on 34.118.224.10:53: no such host"} 2025-12-04T13:34:31.674Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "f6eb7d07-9000-4b40-b671-88b0d2c780f2", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.1.80.47:33062: connect: connection refused"} 2025-12-04T13:35:23.529Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "df7f544a-382e-4456-bda2-2b4842bda1ac", "primary name": "some-name-pxc-0.some-name-pxc.users-9213.svc.cluster.local"} 2025-12-04T13:35:28.649Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "8393ec5e-4551-4d1a-8e7f-3fdc4e986025", "primary name": "some-name-pxc-0.some-name-pxc.users-9213.svc.cluster.local"} 2025-12-04T13:35:33.789Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "40cdd63c-2d68-4f44-b6f5-130535c68e9c", "primary name": "some-name-pxc-0.some-name-pxc.users-9213.svc.cluster.local"} 2025-12-04T13:35:38.926Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "695a4e1e-365a-4122-9c31-879283c3e296", "primary name": "some-name-pxc-0.some-name-pxc.users-9213.svc.cluster.local"} 2025-12-04T13:35:44.068Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "298ab882-5cb4-462a-9fa9-7b18a8516977", "primary name": "some-name-pxc-0.some-name-pxc.users-9213.svc.cluster.local"} 2025-12-04T13:35:49.205Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "bf141e17-eaf9-4d2f-b76e-2ef8cf1c1bd3", "primary name": "some-name-pxc-0.some-name-pxc.users-9213.svc.cluster.local"} 2025-12-04T13:35:54.326Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "be09236c-360b-42ac-9606-d8c0595ba371", "primary name": "some-name-pxc-0.some-name-pxc.users-9213.svc.cluster.local"} 2025-12-04T13:36:00.233Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "3a6cded9-b55f-4ca4-ae5b-7cbb53b3ea46", "user": "monitor"} 2025-12-04T13:36:01.699Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "3a6cded9-b55f-4ca4-ae5b-7cbb53b3ea46", "user": "monitor"} 2025-12-04T13:36:01.715Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "3a6cded9-b55f-4ca4-ae5b-7cbb53b3ea46", "last-applied-secret": "f63160b4b412887ede0b7b4f08949b3b0dc61854b9bef21d65cb544757499d39"} 2025-12-04T13:36:04.372Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "24dbb880-fdce-4c28-bb95-5d92e61020cb", "user": "operator"} 2025-12-04T13:36:04.390Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "24dbb880-fdce-4c28-bb95-5d92e61020cb", "user": "operator"} 2025-12-04T13:36:04.405Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "24dbb880-fdce-4c28-bb95-5d92e61020cb", "secret": "some-name-mysql-init", "user": "operator"} 2025-12-04T13:36:04.423Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "24dbb880-fdce-4c28-bb95-5d92e61020cb", "user": "operator"} 2025-12-04T13:36:04.438Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "24dbb880-fdce-4c28-bb95-5d92e61020cb", "user": "operator"} 2025-12-04T13:36:04.462Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "24dbb880-fdce-4c28-bb95-5d92e61020cb", "last-applied-secret": "d30a30b35a604818b99b30d496f1bf6fb6c1c89a5ead6dff5a2fa17327f8461d"} 2025-12-04T13:36:04.466Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "24dbb880-fdce-4c28-bb95-5d92e61020cb", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-04T13:36:05.137Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "3a6cded9-b55f-4ca4-ae5b-7cbb53b3ea46", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-9213.svc.cluster.local:3306) to ProxySQL\nRemoving existing user from ProxySQL: replication\nAdding user to ProxySQL: replication\n / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-9213.svc.cluster.local:3306) to ProxySQL\nRemoving existing user from ProxySQL: replication\nAdding user to ProxySQL: replication\n / 
\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-04T13:36:08.759Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "f21a4140-1101-483a-af97-a28f15b454d2", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for 
user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9213.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-04T13:36:33.769Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e5952ded-e51c-4dba-8665-81396ec111b9"} 2025-12-04T13:36:37.291Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": 
"users-9213", "name": "some-name", "reconcileID": "1f46919f-002a-4c32-8166-1faec5c8057c"} 2025-12-04T13:36:42.349Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "dc0a4295-ff4c-4d0c-9311-e0f8935ad149"} 2025-12-04T13:36:48.448Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "b93fda4e-b776-44e9-90e0-168e0a9271c7"} 2025-12-04T13:36:52.886Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "fe7690d8-b6c6-4224-8763-4bd3c184d9b2"} 2025-12-04T13:36:58.170Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "d8f9962c-53b2-455d-99a4-a6d7a30b6463"} 2025-12-04T13:37:04.588Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "5a6c603c-1866-481b-88c4-431e75c93790"} 2025-12-04T13:37:09.480Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "d22c9397-a4eb-41f1-8445-0982878da028"} 2025-12-04T13:37:14.772Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "3d91ad3e-4014-41d8-b94e-cf498defc8b6"} 2025-12-04T13:37:19.546Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "00dd419d-6752-4945-9a24-c272f788e5fb"} 2025-12-04T13:37:24.859Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "5a38c810-4fd6-4191-9d77-d43cb1961d04"} 2025-12-04T13:37:30.258Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": 
"b7c94a12-da5a-4596-bb70-2b0b8ef52732"} 2025-12-04T13:37:35.755Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "16fa851f-a33a-42d3-a118-30eb3c6d7abd"} 2025-12-04T13:37:40.982Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "73ed509a-0d78-410b-a21f-495e6250ca2d"} 2025-12-04T13:37:44.957Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e3dfbfd3-7936-4448-93b9-965fe4b2b2bb", "user": "root"} 2025-12-04T13:37:44.980Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e3dfbfd3-7936-4448-93b9-965fe4b2b2bb", "user": "root"} 2025-12-04T13:37:45.001Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e3dfbfd3-7936-4448-93b9-965fe4b2b2bb", "secret": "some-name-mysql-init", "user": "root"} 2025-12-04T13:37:49.037Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e3dfbfd3-7936-4448-93b9-965fe4b2b2bb"} 2025-12-04T13:37:49.058Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "54a6b607-001d-4437-b31c-8d618c724b37"} 2025-12-04T13:37:49.058Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e3dfbfd3-7936-4448-93b9-965fe4b2b2bb", "user": "root"} 2025-12-04T13:37:49.075Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e3dfbfd3-7936-4448-93b9-965fe4b2b2bb", "user": "root"} 2025-12-04T13:37:49.088Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", 
"reconcileID": "e3dfbfd3-7936-4448-93b9-965fe4b2b2bb", "user": "monitor"} 2025-12-04T13:37:49.102Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e3dfbfd3-7936-4448-93b9-965fe4b2b2bb", "user": "monitor"} 2025-12-04T13:37:49.134Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e3dfbfd3-7936-4448-93b9-965fe4b2b2bb", "secret": "some-name-mysql-init", "user": "monitor"} 2025-12-04T13:37:49.152Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e3dfbfd3-7936-4448-93b9-965fe4b2b2bb", "user": "monitor"} 2025-12-04T13:37:49.170Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e3dfbfd3-7936-4448-93b9-965fe4b2b2bb", "user": "monitor"} 2025-12-04T13:37:49.464Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e3dfbfd3-7936-4448-93b9-965fe4b2b2bb", "user": "xtrabackup"} 2025-12-04T13:37:49.478Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e3dfbfd3-7936-4448-93b9-965fe4b2b2bb", "user": "xtrabackup"} 2025-12-04T13:37:49.496Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e3dfbfd3-7936-4448-93b9-965fe4b2b2bb", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-12-04T13:37:49.514Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e3dfbfd3-7936-4448-93b9-965fe4b2b2bb", "user": "xtrabackup"} 2025-12-04T13:37:49.526Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e3dfbfd3-7936-4448-93b9-965fe4b2b2bb", "user": "xtrabackup"} 2025-12-04T13:37:49.533Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": 
"PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e3dfbfd3-7936-4448-93b9-965fe4b2b2bb", "user": "proxyadmin"} 2025-12-04T13:37:49.552Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e3dfbfd3-7936-4448-93b9-965fe4b2b2bb", "user": "proxyadmin"} 2025-12-04T13:37:49.570Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e3dfbfd3-7936-4448-93b9-965fe4b2b2bb", "user": "proxyadmin"} 2025-12-04T13:37:49.570Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e3dfbfd3-7936-4448-93b9-965fe4b2b2bb", "last-applied-secret": "ca6288516152374dfd1a227f05439e2dbe76ba5eca3e733235bda272efeebd71"} 2025-12-04T13:37:49.570Z INFO PXC pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e3dfbfd3-7936-4448-93b9-965fe4b2b2bb", "last-applied-secret": "ca6288516152374dfd1a227f05439e2dbe76ba5eca3e733235bda272efeebd71"} 2025-12-04T13:37:49.573Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e3dfbfd3-7936-4448-93b9-965fe4b2b2bb", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-04T13:37:49.672Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e3dfbfd3-7936-4448-93b9-965fe4b2b2bb", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-04T13:37:51.145Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "e3dfbfd3-7936-4448-93b9-965fe4b2b2bb", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-04T13:38:04.512Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "ff4fb3d5-2c55-4e1e-aa89-c67ad3e81ed9", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-04T13:38:04.716Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "ff4fb3d5-2c55-4e1e-aa89-c67ad3e81ed9", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-12-04T13:38:04.763Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "ff4fb3d5-2c55-4e1e-aa89-c67ad3e81ed9", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-12-04T13:38:04.844Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "ff4fb3d5-2c55-4e1e-aa89-c67ad3e81ed9", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-04T13:38:04.998Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "ff4fb3d5-2c55-4e1e-aa89-c67ad3e81ed9", "object": "some-name-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-04T13:38:05.986Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "cc12d1d7-e057-4e19-9603-798adb7dc6f5", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-12-04T13:39:39.372Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": 
"b86a38d6-24cf-443f-a605-372a62d6048f", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-9213 on 34.118.224.10:53: no such host"} 2025-12-04T13:40:15.731Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "952ed3c5-0660-477f-b725-651f4fb042c0", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-9213 on 34.118.224.10:53: no such host"} 2025-12-04T13:41:08.822Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "user": "root"} 2025-12-04T13:41:08.845Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "user": "root"} 2025-12-04T13:41:08.868Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "secret": "some-name-mysql-init", "user": "root"} 2025-12-04T13:41:08.898Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "user": "root"} 2025-12-04T13:41:08.916Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "user": "root"} 2025-12-04T13:41:08.923Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "user": "operator"} 2025-12-04T13:41:08.937Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "user": "operator"} 2025-12-04T13:41:08.959Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", 
"name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "secret": "some-name-mysql-init", "user": "operator"} 2025-12-04T13:41:08.980Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "user": "operator"} 2025-12-04T13:41:08.993Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "user": "operator"} 2025-12-04T13:41:09.000Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "user": "monitor"} 2025-12-04T13:41:09.015Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "user": "monitor"} 2025-12-04T13:41:09.037Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "secret": "some-name-mysql-init", "user": "monitor"} 2025-12-04T13:41:09.060Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "user": "monitor"} 2025-12-04T13:41:09.356Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "user": "xtrabackup"} 2025-12-04T13:41:09.374Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "user": "xtrabackup"} 2025-12-04T13:41:09.401Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-12-04T13:41:09.424Z INFO Internal secrets updated {"controller": "pxc-controller", 
"controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "user": "xtrabackup"} 2025-12-04T13:41:09.437Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "user": "xtrabackup"} 2025-12-04T13:41:09.442Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "user": "replication"} 2025-12-04T13:41:09.456Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "user": "replication"} 2025-12-04T13:41:09.482Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "secret": "some-name-mysql-init", "user": "replication"} 2025-12-04T13:41:09.508Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "user": "replication"} 2025-12-04T13:41:09.520Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "user": "replication"} 2025-12-04T13:41:09.520Z INFO PXC pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "last-applied-secret": "365e89da8abbbd3de455f71f11c46fa17a286d73cad77ce9c8205e9a523796d1"} 2025-12-04T13:41:09.522Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "6fbd0c15-e6c9-4d9d-9162-78ceae660a6b", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-04T13:43:33.529Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": 
"PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "27ac359e-59d8-4bf5-b9fa-fd7fbbec9901", "user": "monitor"} 2025-12-04T13:43:34.383Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "27ac359e-59d8-4bf5-b9fa-fd7fbbec9901", "user": "monitor"} 2025-12-04T13:43:36.123Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "b7a2b85c-d0ae-44f1-b441-0bfd873f4a9b", "user": "monitor"} 2025-12-04T13:43:36.137Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "b7a2b85c-d0ae-44f1-b441-0bfd873f4a9b", "user": "monitor"} 2025-12-04T13:43:36.155Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "b7a2b85c-d0ae-44f1-b441-0bfd873f4a9b", "secret": "some-name-mysql-init", "user": "monitor"} 2025-12-04T13:43:36.171Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "b7a2b85c-d0ae-44f1-b441-0bfd873f4a9b", "user": "monitor"} 2025-12-04T13:43:38.539Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "424a87f6-be00-40fe-8e94-3cb55126dce8", "user": "monitor"} 2025-12-04T13:43:44.140Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "bf6acf6f-50b9-443e-a484-ac19abb99d81", "user": "monitor"} 2025-12-04T13:43:49.729Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "dca8f2fd-f63e-491c-9376-bf48cf4accb8", "user": "monitor"} 2025-12-04T13:43:55.054Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "fbe7971b-2fef-455d-9980-10880180b8ff", 
"user": "monitor"} 2025-12-04T13:44:00.596Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-9213"}, "namespace": "users-9213", "name": "some-name", "reconcileID": "938b1591-bfd3-441c-93a4-7bd6b717a533", "user": "monitor"} - "22d3e3ba01dd0f6831b84d96f6d47709033092925d0046a00948e30237344e", ... // 22 identical fields ... // 2 identical fields ... // 2 identical fields ... // 2 identical fields - "33b72f9a984ad44ecf5b9c6b43b47a91a83ef9121a8bc72979eea0ca046dbc33", + "365e89da8abbbd3de455f71f11c46fa17a286d73cad77ce9c8205e9a523796d", - "365e89da8abbbd3de455f71f11c46fa17a286d73cad77ce9c8205e9a523796d1", + "365e89da8abbbd3de455f71f11c46fa17a286d73cad77ce9c8205e9a523796d1", ... // 3 identical elements ... // 3 identical fields ... // 3 identical fields ... // 3 identical fields ... // 4 identical fields ... // 5 identical elements ... // 5 identical fields ... // 5 identical fields ... // 5 identical fields ... // 6 identical fields ... // 6 identical fields ... // 7 identical fields ... // 8 identical fields ... // 9 identical fields ... // 9 identical fields + "a", - "a0ff34e1cb9ef72733a47c6f82a4c60f627a13ef7b76d2563ba830b2f0692f67", AccessModes: nil, ActiveDeadlineSeconds: nil, Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}}, Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}}, Annotations: map[string]string{ - Annotations: map[string]string{ + Annotations: map[string]string{ + APIVersion: "", - APIVersion: "apps/v1", - APIVersion: "apps/v1", - APIVersion: "v1", Args: {"mysqld"}, Args: {"proxysql", "-f", "-c", "/etc/proxysql/proxysql.cnf", ...}, - Args: []string{"logrotate"}, AutomountServiceAccountToken: nil, + AvailableReplicas: 0, - AvailableReplicas: 2, - AvailableReplicas: 3, AWSElasticBlockStore: nil, AzureFile: nil, - "ca6288516152374dfd1a227f05439e2dbe76ba5eca3e733235bda272efeebd7", + "ca6288516152374dfd1a227f05439e2dbe76ba5eca3e733235bda272efeebd71", Capacity: nil, - CollisionCount: &0, + CollisionCount: nil, Conditions: nil, ConfigMapKeyRef: nil, ConfigMap: &v1.ConfigMapVolumeSource{ ContainerPort: 3306, ContainerPort: 33060, ContainerPort: 33062, ContainerPort: 4444, ContainerPort: 4567, ContainerPort: 4568, ContainerPort: 6032, ContainerPort: 6070, Containers: []v1.Container{ + CreationTimestamp: v1.Time{}, - CreationTimestamp: v1.Time{Time: s"2025-12-04 13:21:57 +0000 UTC"}, + CurrentReplicas: 0, - CurrentReplicas: 2, - CurrentReplicas: 3, + CurrentRevision: "", - CurrentRevision: "some-name-proxysql-5cd8678df5", - CurrentRevision: "some-name-proxysql-5d8bfb4fdc", - CurrentRevision: "some-name-proxysql-6d5f9b6c56", - CurrentRevision: "some-name-proxysql-798b57769b", - CurrentRevision: "some-name-proxysql-859bc8c7d7", - CurrentRevision: "some-name-proxysql-9444b7b6", - CurrentRevision: 
"some-name-pxc-54bb7d678b", - CurrentRevision: "some-name-pxc-54ff477d8c", - CurrentRevision: "some-name-pxc-6b866f555d", - CurrentRevision: "some-name-pxc-78bf8598b6", - "d30a30b35a604818b99b30d496f1bf6fb6c1c89a5ead6dff5a2fa17327f8461d", + "d30a30b35a604818b99b30d496f1bf6fb6c1c89a5ead6dff5a2fa17327f8461d", DataSource: nil, DataSourceRef: nil, - DefaultMode: &420, - DefaultMode: &420, + DefaultMode: nil, + DefaultMode: nil, DeletionGracePeriodSeconds: nil, DeletionGracePeriodSeconds: nil, DeletionTimestamp: nil, + DeprecatedServiceAccount: "", - DeprecatedServiceAccount: "default", + DNSPolicy: "", - DNSPolicy: "ClusterFirst", "e", EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-proxysql"}, Optional: &true}}}, EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-pxc"}, Optional: &true}}}, EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-log-collector"}, Optional: &true}}}, - EnvFrom: []v1.EnvFromSource{{SecretRef: s"&SecretEnvSource{LocalObjectRefe"...}}, Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "OPERATOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "proxyadmin"}}}, ...}, Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, ...}, Env: []v1.EnvVar{ - Env: []v1.EnvVar{ EphemeralContainers: nil, - "f63160b4b412887ede0b7b4f08949b3b0dc61854b9bef21d65cb544757499d39", + "f63160b4b412887ede0b7b4f08949b3b0dc61854b9bef21d65cb544757499d39", FailureThreshold: 3, FC: nil, + "ff34", FieldPath: "metadata.name", FieldPath: "metadata.namespace", FieldRef: &v1.ObjectFieldSelector{ - FieldsType: "FieldsV1", - FieldsType: "FieldsV1", - FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`..., - FieldsV1: s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`..., Finalizers: nil, Finalizers: nil, + Generation: 0, - Generation: 1, - Generation: 2, - Generation: 3, - Generation: 4, - Generation: 5, - Generation: 6, - Generation: 7, - Generation: 8, github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 GitRepo: nil, /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:857 HostAliases: nil, HostIP: "", HostPort: 0, - Image: "perconalab/fluentbit:main-logcollector", - Image: "perconalab/fluentbit:main-logcollector", ImagePullPolicy: "Always", - ImagePullPolicy: "Always", InitContainers: []v1.Container{ InitialDelaySeconds: 300, ISCSI: nil, Items: nil, Items: nil, "kubectl.kubernetes.io/default-container": "proxysql", "kubectl.kubernetes.io/default-container": "pxc", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", 
"app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Labels: nil, + "last-applied-secret": "0e22d3e3ba01dd0f6831b84d96f6d47709033092925d0046a00948e30237344e", + "last-applied-secret": "33b72f9a984ad44ecf5b9c6b43b47a91a83ef9121a8bc72979eea0ca046dbc33", "last-applied-secret": strings.Join({ Lifecycle: nil, LivenessProbe: &v1.Probe{ LocalObjectReference: {Name: "auto-some-name-pxc"}, LocalObjectReference: {Name: "some-name-pxc"}, ManagedFields: nil, + ManagedFields: nil, - ManagedFields: []v1.ManagedFieldsEntry{ - Manager: "kube-controller-manager", - Manager: "percona-xtradb-cluster-operator", MinReadySeconds: 0, [mysql] 2025/12/04 13:29:48 packets.go:58 read tcp 10.1.81.55:60802->10.1.80.45:33062: read: connection reset by peer [mysql] 2025/12/04 13:43:16 packets.go:58 unexpected EOF Name: "auto-config", {Name: "bin", VolumeSource: {EmptyDir: &{}}}, {Name: "CLUSTER_HASH", Value: "2633470"}, Name: "config", - {Name: "IS_LOGCOLLECTOR", Value: "yes"}, Name: "ist", {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"}, - {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"}, - Name: "logrotate", - Name: "logs", {Name: "MONITOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "monitor"}}}, - {Name: "MONITOR_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, Name: "mysql", Name: "mysql-admin", Name: "mysql-init-file", Name: "mysql-users-secret-file", Name: "mysqlx", {Name: "OPERATOR_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, Name: "POD_NAME", Name: "POD_NAMESPASE", - {Name: "POD_NAMESPASE", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, - {Name: "POD_NAME", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, Name: "proxyadm", - {Name: "SERVICE_TYPE", Value: "mysql"}, Namespace: "users-9213", Name: "ssl", Name: "ssl-internal", Name: "sst", Name: "stats", {Name: "tmp", VolumeSource: {EmptyDir: &{}}}, Name: "vault-keyring-secret", Name: "write-set", {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, NFS: nil, NodeName: "", NodeSelector: nil, ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "last-applied-secret": "0e22d3e3ba01dd0f6831b84d96f6d47709033092925d0046a00948e30237344e", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}}, ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}}, ObjectMeta: {Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "pxc", "last-applied-secret": 
"ca6288516152374dfd1a227f05439e2dbe76ba5eca3e733235bda272efeebd71", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}}, ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, ObjectMeta: {Name: "proxydata", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, ObjectMeta: v1.ObjectMeta{ ObjectMeta: v1.ObjectMeta{ + ObservedGeneration: 0, - ObservedGeneration: 1, - ObservedGeneration: 2, - ObservedGeneration: 3, - ObservedGeneration: 4, - ObservedGeneration: 5, - ObservedGeneration: 6, - ObservedGeneration: 7, - ObservedGeneration: 8, - Operation: "Update", - Operation: "Update", Optional: &false, Optional: &true, Optional: &true, Ordinals: nil, OS: nil, Overhead: nil, OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name", UID: "5239bcc3-22ff-456c-a2a2-9243aef2afea", ...}}, OwnerReferences: nil, "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMGUyMmQzZTNiYTAxZGQwZjY4MzFiODRkOTZmNmQ0NzcwOTAzMzA5MjkyNWQwMDQ2YTAwOTQ4ZTMwMjM3MzQ0ZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMzY1ZTg5ZGE4YWJiYmQzZGU0NTVmNzFmMTFjNDZmYTE3YTI4NmQ3M2NhZDc3Y2U5YzgyMDVlOWE1MjM3OTZkMSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMzY1ZTg5ZGE4YWJiYmQzZGU0NTVmNzFmMTFjNDZmYTE3YTI4NmQ3M2NhZDc3Y2U5YzgyMDVlOWE1MjM3OTZkMSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiY2E2Mjg4NTE2MTUyMzc0ZGZkMWEyMjdmMDU0MzllMmRiZTc2YmE1ZWNhM2U3MzMyMzViZGEyNzJlZmVlYmQ3MSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYTBmZjM0ZTFjYjllZjcyNzMzYTQ3YzZmODJhNGM2MGY2MjdhMTNlZjdiNzZkMjU2M2JhODMwYjJmMDY5MmY2NyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYTBmZjM0ZTFjYjllZjcyNzMzYTQ3YzZmODJhNGM2MGY2MjdhMTNlZjdiNzZkMjU2M2JhODMwYjJmMDY5MmY2NyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZDMwYTMwYjM1YTYwNDgxOGI5OWIzMGQ0OTZmMWJmNmZiNmMxYzg5YTVlYWQ2ZGZmNWEyZmExNzMyN2Y4NDYxZCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZDMwYTMwYjM1YTYwNDgxOGI5OWIzMGQ0OTZmMWJmNmZiNmMxYzg5YTVlYWQ2ZGZmNWEyZmExNzMyN2Y4NDYxZCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZjYzMTYwYjRiNDEyODg3ZWRlMGI3YjRmMDg5NDliM2IwZGM2MTg1NGI5YmVmMjFkNjVjYjU0NDc1NzQ5OWQzOSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZjYzMTYwYjRiNDEyODg3ZWRlMGI3YjRmMDg5NDliM2IwZGM2MTg1NGI5YmVmMjFkNjVjYjU0NDc1NzQ5OWQzOSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMzNiNzJmOWE5ODRhZDQ0ZWNmNWI5YzZiNDNiNDdhOTFhODNlZjkxMjFhOGJjNzI5NzllZWEwY2EwNDZkYmMzMyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMzNiNzJmOWE5ODRhZDQ0ZWNmNWI5YzZiNDNiNDdhOTFhODNlZjkxMjFhOGJjNzI5NzllZWEwY2EwNDZkYmMzMyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMzY1ZTg5ZGE4YWJiYmQzZGU0NTVmNzFmMTFjNDZmYTE3YTI4NmQ3M2NhZDc3Y2U5YzgyMDVlOWE1MjM3OTZkMSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiY2E2Mjg4NTE2MTUyMzc0ZGZkMWEyMjdmMDU0MzllMmRiZTc2YmE1ZWNhM2U3MzMyMzViZGEyNzJlZmVlYmQ3MSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiY2E2Mjg4NTE2MTUyMzc0ZGZkMWEyMjdmMDU0MzllMmRiZTc2YmE1ZWNhM2U3MzMyMzViZGEyNzJlZmVlYmQ3MSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiY2E2Mjg4NTE2MTUyMzc0ZGZkMWEyMjdmMDU0MzllMmRiZTc2YmE1ZWNhM2U3MzMyMzViZGEyNzJlZmVlYmQ3MSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMDc2LWVlMWJlMGJhIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJsb2dzIiwiaW1hZ2UiOiJwZXJjb25hbGFiL2ZsdWVudGJpdDptYWluLWxvZ2NvbGxlY3RvciIsImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWxvZy1jb2xsZWN0b3IiLCJvcHRpb25hbCI6dHJ1ZX19XSwiZW52IjpbeyJuYW1lIjoiTE9HX0RBVEFfRElSIiwidmFsdWUiOiIvdmFyL2xpYi9teXNxbCJ9LHsibmFtZSI6IlBPRF9OQU1FU1BBU0UiLCJ2YWx1ZUZyb20iOnsiZmllbGRSZWYiOnsiZmllbGRQYXRoIjoibWV0YWRhdGEubmFtZXNwYWNlIn19fSx7Im5hbWUiOiJQT0RfTkFNRSIsInZhbHVlRnJvbSI6eyJmaWVsZFJlZiI6eyJmaWVsZFBhdGgiOiJtZXRhZGF0YS5uYW1lIn19fV0sInJlc291cmNlcyI6e30sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImRhdGFkaXIiLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoibG9ncm90YXRlIiwiaW1hZ2UiOiJwZXJjb25hbGFiL2ZsdWVudGJpdDptYWluLWxvZ2NvbGxlY3RvciIsImFyZ3MiOlsibG9ncm90YXRlIl0sImVudiI6W3sibmFtZSI6IlNFUlZJQ0VfVFlQRSIsInZhbHVlIjoibXlzcWwifSx7Im5hbWUiOiJNT05JVE9SX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoibW9uaXRvciJ9fX1dLCJyZXNvdXJjZXMiOnt9LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJkYXRhZGlyIiwibW91bnRQYXRoIjoiL3Zhci9saWIvbXlzcWwifV0sImltYWdlUHVsbFBvbGljeSI6Ik
Fsd2F5cyJ9LHsibmFtZSI6InB4YyIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOm1haW4tcHhjOC4wIiwiY29tbWFuZCI6WyIvdmFyL2xpYi9teXNxbC9weGMtZW50cnlwb2ludC5zaCJdLCJhcmdzIjpbIm15c3FsZCJdLCJwb3J0cyI6W3sibmFtZSI6Im15c3FsIiwiY29udGFpbmVyUG9ydCI6MzMwNn0seyJuYW1lIjoic3N0IiwiY29udGFpbmVyUG9ydCI6NDQ0NH0seyJuYW1lIjoid3JpdGUtc2V0IiwiY29udGFpbmVyUG9ydCI6NDU2N30seyJuYW1lIjoiaXN0IiwiY29udGFpbmVyUG9ydCI6NDU2OH0seyJuYW1lIjoibXlzcWwtYWRtaW4iLCJjb250YWluZXJQb3J0IjozMzA2Mn0seyJuYW1lIjoibXlzcWx4IiwiY29udGFpbmVyUG9ydCI6MzMwNjB9XSwiZW52RnJv"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiY2E2Mjg4NTE2MTUyMzc0ZGZkMWEyMjdmMDU0MzllMmRiZTc2YmE1ZWNhM2U3MzMyMzViZGEyNzJlZmVlYmQ3MSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMDc2LWVlMWJlMGJhIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXB4YzguMCIsImNvbW1hbmQiOlsiL3Zhci9saWIvbXlzcWwvcHhjLWVudHJ5cG9pbnQuc2giXSwiYXJncyI6WyJteXNxbGQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InNzdCIsImNvbnRhaW5lclBvcnQiOjQ0NDR9LHsibmFtZSI6IndyaXRlLXNldCIsImNvbnRhaW5lclBvcnQiOjQ1Njd9LHsibmFtZSI6ImlzdCIsImNvbnRhaW5lclBvcnQiOjQ1Njh9LHsibmFtZSI6Im15c3FsLWFkbWluIiwiY29udGFpbmVyUG9ydCI6MzMwNjJ9LHsibmFtZSI6Im15c3FseCIsImNv
bnRhaW5lclBvcnQiOjMzMDYwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXB4YyIsIm9wdGlvbmFsIjp0cnVlfX1dLCJlbnYiOlt7Im5hbWUiOiJQWENfU0VSVklDRSIsInZhbHVlIjoic29tZS1uYW1lLXB4Yy11bnJlYWR5In0seyJuYW1lIjoiTU9OSVRPUl9IT1NUIiwidmFsdWUiOiIlIn0seyJuYW1lIjoiTVlTUUxfUk9PVF9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6InJvb3QifX19LHsibmFtZSI6IlhUUkFCQUNLVVBfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJ4dHJhYmFja3VwIn19fSx7Im5hbWUiOiJNT05JVE9SX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoibW9uaXRvciJ9fX0seyJuYW1lIjoiQ0xVU1RFUl9IQVNIIiwidmFsdWUiOiIyNjMzNDcwIn0seyJuYW1lIjoiT1BFUkFUT1JfQURNSU5fUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiTElWRU5FU1NfQ0hFQ0tfVElNRU9VVCIsInZhbHVlIjoiNSJ9LHsibmFtZSI6IlJFQURJTkVTU19DSEVDS19USU1FT1VUIiwidmFsdWUiOiIxNSJ9LHsibmFtZSI6IkRFRkFVTFRfQVVUSEVOVElDQVRJT05fUExVR0lOIiwidmFsdWUiOiJjYWNoaW5nX3NoYTJfcGFzc3dvcmQifSx7Im5hbWUiOiJNWVNR"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZjYzMTYwYjRiNDEyODg3ZWRlMGI3YjRmMDg5NDliM2IwZGM2MTg1NGI5YmVmMjFkNjVjYjU0NDc1NzQ5OWQzOSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZjYzMTYwYjRiNDEyODg3ZWRlMGI3YjRmMDg5NDliM2IwZGM2MTg1NGI5YmVmMjFkNjVjYjU0NDc1NzQ5OWQzOSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNv"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMGUyMmQzZTNiYTAxZGQwZjY4MzFiODRkOTZmNmQ0NzcwOTAzMzA5MjkyNWQwMDQ2YTAwOTQ4ZTMwMjM3MzQ0ZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMw"..., "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc", + PeriodSeconds: 0, - PeriodSeconds: 10, + PersistentVolumeClaimRetentionPolicy: nil, - PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", + Phase: "", - Phase: "Pending", + PodManagementPolicy: "", - PodManagementPolicy: "OrderedReady", Ports: nil, Ports: []v1.ContainerPort{ PreemptionPolicy: nil, ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}}, + Protocol: "", - Protocol: "TCP", Quobyte: nil, ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...}, + ReadyReplicas: 0, - ReadyReplicas: 2, - ReadyReplicas: 3, + Replicas: 0, Replicas: &2, - Replicas: 2, - Replicas: &2, + Replicas: &2, Replicas: &3, - Replicas: 3, - Replicas: &3, + Replicas: &3, ResizePolicy: nil, ResourceFieldRef: nil, Resources: {}, Resources: {Limits: {s"cpu": {i: {...}, s: "1", Format: "DecimalSI"}, s"memory": {i: {...}, s: "2G", Format: "DecimalSI"}}, Requests: {s"cpu": {i: {...}, s: "100m", Format: "DecimalSI"}, s"memory": {i: {...}, Format: "DecimalSI"}}}, + ResourceVersion: "", - ResourceVersion: "1764854554066239004", - ResourceVersion: "1764854738814719005", - ResourceVersion: "1764854867141007004", - ResourceVersion: "1764854905577039004", - ResourceVersion: "1764854923226623004", - ResourceVersion: "1764855072377071005", - ResourceVersion: "1764855115018015004", - ResourceVersion: "1764855187085279004", - ResourceVersion: "1764855228153471004", - ResourceVersion: "1764855355131023005", - ResourceVersion: "1764855393998911004", - ResourceVersion: "1764855469805503005", - ResourceVersion: "1764855664511375005", + RestartPolicy: "", - RestartPolicy: "Always", - RevisionHistoryLimit: &10, + RevisionHistoryLimit: nil, + SchedulerName: "", - SchedulerName: "default-scheduler", SecretName: "internal-some-name", SecretName: "some-name-mysql-init", SecretName: "some-name-ssl", SecretName: "some-name-ssl-internal", SecretName: "some-name-vault", Secret: &v1.SecretVolumeSource{ SecurityContext: nil, Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": 
"percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, SelfLink: "", ServiceAccountName: "default", ServiceName: "some-name-proxysql-unready", ServiceName: "some-name-pxc", SetHostnameAsFQDN: nil, Spec: v1.PersistentVolumeClaimSpec{ Spec: v1.PodSpec{ Spec: v1.StatefulSetSpec{ StartupProbe: nil, Status: v1.PersistentVolumeClaimStatus{ Status: v1.StatefulSetStatus{ StorageClassName: nil, Subdomain: "", - Subresource: "status", SuccessThreshold: 1, Template: v1.PodTemplateSpec{ TerminationGracePeriodSeconds: &30, TerminationGracePeriodSeconds: &600, TerminationGracePeriodSeconds: nil, + TerminationMessagePath: "", - TerminationMessagePath: "/dev/termination-log", + TerminationMessagePolicy: "", - TerminationMessagePolicy: "File", TimeoutSeconds: 5, - Time: s"2025-12-04 13:21:57 +0000 UTC", - Time: s"2025-12-04 13:22:34 +0000 UTC", - Time: s"2025-12-04 13:25:38 +0000 UTC", - Time: s"2025-12-04 13:27:28 +0000 UTC", - Time: s"2025-12-04 13:27:47 +0000 UTC", - Time: s"2025-12-04 13:27:53 +0000 UTC", - Time: s"2025-12-04 13:28:25 +0000 UTC", - Time: s"2025-12-04 13:28:42 +0000 UTC", - Time: s"2025-12-04 13:28:43 +0000 UTC", - Time: s"2025-12-04 13:28:49 +0000 UTC", - Time: s"2025-12-04 13:31:12 +0000 UTC", - Time: s"2025-12-04 13:31:24 +0000 UTC", - Time: s"2025-12-04 13:31:55 +0000 UTC", - Time: s"2025-12-04 13:32:51 +0000 UTC", - Time: s"2025-12-04 13:33:07 +0000 UTC", - Time: s"2025-12-04 13:33:32 +0000 UTC", - Time: s"2025-12-04 13:33:48 +0000 UTC", - Time: s"2025-12-04 13:35:55 +0000 UTC", - Time: s"2025-12-04 13:36:04 +0000 UTC", - Time: s"2025-12-04 13:36:33 +0000 UTC", - Time: s"2025-12-04 13:37:49 +0000 UTC", - Time: s"2025-12-04 13:37:49 +0000 UTC", - Time: s"2025-12-04 13:38:04 +0000 UTC", - Time: s"2025-12-04 13:41:04 +0000 UTC", Tolerations: nil, - TopologySpreadConstraints: nil, + TopologySpreadConstraints: []v1.TopologySpreadConstraint{}, TypeMeta: {}, TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"}, + UID: "", - UID: "2779f3bc-79bc-4611-b53f-ae0ed96f38e9", - UID: "d82adca3-7598-4170-8e2e-2c5b2ecb9957", UpdatedReplicas: 0, + UpdatedReplicas: 0, - UpdatedReplicas: 2, - UpdatedReplicas: 3, + UpdateRevision: "", - UpdateRevision: "some-name-proxysql-5cd8678df5", - UpdateRevision: "some-name-proxysql-5d8bfb4fdc", - UpdateRevision: "some-name-proxysql-6d5f9b6c56", - UpdateRevision: "some-name-proxysql-798b57769b", - UpdateRevision: "some-name-proxysql-859bc8c7d7", - UpdateRevision: "some-name-proxysql-9444b7b6", - UpdateRevision: "some-name-pxc-54bb7d678b", - UpdateRevision: "some-name-pxc-54ff477d8c", - UpdateRevision: "some-name-pxc-6648f5bb79", - UpdateRevision: "some-name-pxc-6b866f555d", - UpdateRevision: "some-name-pxc-78bf8598b6", UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}}, &v1.StatefulSet{ Value: "", ValueFrom: &v1.EnvVarSource{ VolumeAttributesClassName: nil, VolumeClaimTemplates: []v1.PersistentVolumeClaim{ VolumeDevices: nil, - VolumeMode: &"Filesystem", + VolumeMode: nil, VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...}, - VolumeMounts: []v1.VolumeMount{{Name: "datadir", MountPath: "/var/lib/mysql"}}, 
VolumeName: "", VolumeSource: v1.VolumeSource{ Volumes: []v1.Volume{ VsphereVolume: nil, WorkingDir: "", + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-9213 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.JC5xrTFdGG ++ mktemp + local LAST_ERR=/tmp/tmp.dl4ErVmhVy + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.JC5xrTFdGG perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-9213 namespace + cat /tmp/tmp.dl4ErVmhVy + rm /tmp/tmp.JC5xrTFdGG /tmp/tmp.dl4ErVmhVy + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.212KVb1yJ8 ++ mktemp + local LAST_ERR=/tmp/tmp.fD1xX6o1xS + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.212KVb1yJ8 No resources found + cat /tmp/tmp.fD1xX6o1xS + rm /tmp/tmp.212KVb1yJ8 /tmp/tmp.fD1xX6o1xS + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.V44u1A0ZXr ++ mktemp + local LAST_ERR=/tmp/tmp.R2kGzmO4Zh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.V44u1A0ZXr No resources found + cat /tmp/tmp.R2kGzmO4Zh + rm /tmp/tmp.V44u1A0ZXr /tmp/tmp.R2kGzmO4Zh + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.MUQELjiW4N ++ mktemp + local LAST_ERR=/tmp/tmp.g3C9f4cvHL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.MUQELjiW4N validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.g3C9f4cvHL + rm /tmp/tmp.MUQELjiW4N /tmp/tmp.g3C9f4cvHL + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-9213 + rm -rf /tmp/tmp.tJZuaBgojY + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + desc 'test passed' + local LAST_OUT=/tmp/tmp.uTF1h8W9vf + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.eWu1hbz3Ra ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.CKl5CqaUrS + local exit_status=0 + local LAST_ERR=/tmp/tmp.xB51oH9rkL + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-9213 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator