Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/logs/users-8-0.log Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 + create_infra users-17636 + local ns=users-17636 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-32520 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.ffCuwilrzF ++ mktemp + local LAST_ERR=/tmp/tmp.N91j4rJxBU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ffCuwilrzF perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-32520 namespace + cat /tmp/tmp.N91j4rJxBU + rm /tmp/tmp.ffCuwilrzF /tmp/tmp.N91j4rJxBU + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.jZ1p5XZ9sr ++ mktemp + local LAST_ERR=/tmp/tmp.BXE05NLzs9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jZ1p5XZ9sr No resources found + cat /tmp/tmp.BXE05NLzs9 + rm /tmp/tmp.jZ1p5XZ9sr /tmp/tmp.BXE05NLzs9 + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.MwQevL61Zn ++ mktemp + local LAST_ERR=/tmp/tmp.H04Xjj0F8f + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.MwQevL61Zn No resources found + cat /tmp/tmp.H04Xjj0F8f + rm /tmp/tmp.MwQevL61Zn /tmp/tmp.H04Xjj0F8f + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were 
provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace + xargs kubectl delete ns ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + kubectl_bin get ns ++ mktemp + local LAST_OUT=/tmp/tmp.3c1pVhA9EN ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.E1upq50jU6 + local exit_status=0 + local LAST_OUT=/tmp/tmp.E9EqdNnaq3 ++ seq 0 2 ++ mktemp + local LAST_ERR=/tmp/tmp.VxlcHh0qeT + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3c1pVhA9EN + cat /tmp/tmp.E1upq50jU6 + rm /tmp/tmp.3c1pVhA9EN /tmp/tmp.E1upq50jU6 + return 0 namespace "users-32520" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.E9EqdNnaq3 namespace "pxc-operator" deleted + cat /tmp/tmp.VxlcHh0qeT + rm /tmp/tmp.E9EqdNnaq3 /tmp/tmp.VxlcHh0qeT + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.SMxPabnoMR ++ mktemp + local LAST_ERR=/tmp/tmp.HkzEZPZmNa + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SMxPabnoMR namespace/pxc-operator created + cat /tmp/tmp.HkzEZPZmNa + rm /tmp/tmp.SMxPabnoMR /tmp/tmp.HkzEZPZmNa + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.GAwHHrOZ1q +++ mktemp ++ local LAST_ERR=/tmp/tmp.SMn1JVneGt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GAwHHrOZ1q ++ cat /tmp/tmp.SMn1JVneGt ++ rm /tmp/tmp.GAwHHrOZ1q /tmp/tmp.SMn1JVneGt ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2207-89209ce1-16-cluster8 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.4riyHpTbNa ++ mktemp + local LAST_ERR=/tmp/tmp.RntQ379J5j + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2207-89209ce1-16-cluster8 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.4riyHpTbNa Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2207-89209ce1-16-cluster8" modified. 
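-----------------------------------------------------------------------------------
editor's note: the kubectl_bin retry wrapper
-----------------------------------------------------------------------------------
The "local LAST_OUT=$(mktemp) ... seq 0 2 ... set +e ... break" pattern that dominates this trace is a retry wrapper around kubectl. A minimal sketch, reconstructed from the trace; the output redirections and exact body are assumptions inferred from the later "+ cat /tmp/tmp.*" and "+ rm" calls, not copied from the test suite:

kubectl_bin() {
	local LAST_OUT LAST_ERR exit_status=0
	LAST_OUT=$(mktemp)
	LAST_ERR=$(mktemp)
	for i in $(seq 0 2); do                       # up to 3 attempts
		set +e
		kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"   # assumed redirections
		exit_status=$?
		set -e
		if [ "$exit_status" != 0 ]; then
			sleep 0                               # the trace shows "+ sleep 0", i.e. no real backoff
			continue
		fi
		break
	done
	cat "$LAST_OUT"                               # replay captured output into the log
	cat "$LAST_ERR" >&2
	rm "$LAST_OUT" "$LAST_ERR"
	return "$exit_status"
}

This is why every failed attempt in the log is followed by "+ sleep 0" and a re-run, why command output only appears after the "+ cat" lines, and why some callers append "+ :" to swallow a final "+ return 1".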
+ cat /tmp/tmp.RntQ379J5j + rm /tmp/tmp.4riyHpTbNa /tmp/tmp.RntQ379J5j + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.mCl437mKjo ++ mktemp + local LAST_ERR=/tmp/tmp.HBrgpA3Fyz + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mCl437mKjo customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.HBrgpA3Fyz + rm /tmp/tmp.mCl437mKjo /tmp/tmp.HBrgpA3Fyz + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/deploy/cw-rbac.yaml + kubectl_bin apply -f - + sed -e 's^namespace: .*^namespace: pxc-operator^' ++ mktemp + local LAST_OUT=/tmp/tmp.uptR8QcMKG ++ mktemp + local LAST_ERR=/tmp/tmp.9G8zPrqFWQ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.uptR8QcMKG clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.9G8zPrqFWQ + rm /tmp/tmp.uptR8QcMKG /tmp/tmp.9G8zPrqFWQ + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/deploy/cw-operator.yaml + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2207-89209ce1^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.03jY2zcIJy ++ mktemp + local LAST_ERR=/tmp/tmp.3bGJNJxuXB + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.03jY2zcIJy deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.3bGJNJxuXB + rm /tmp/tmp.03jY2zcIJy /tmp/tmp.3bGJNJxuXB + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.yJXT4Wt8f6 ++ mktemp + local LAST_ERR=/tmp/tmp.urqgKM2MBN + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.yJXT4Wt8f6 pod/percona-xtradb-cluster-operator-58dd9fd94c-ntrb4 condition met + cat /tmp/tmp.urqgKM2MBN + rm /tmp/tmp.yJXT4Wt8f6 /tmp/tmp.urqgKM2MBN + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.KKAYa9JOe8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.UkcBRpUkdW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KKAYa9JOe8 ++ cat /tmp/tmp.UkcBRpUkdW ++ rm /tmp/tmp.KKAYa9JOe8 /tmp/tmp.UkcBRpUkdW ++ return 0 + wait_pod percona-xtradb-cluster-operator-58dd9fd94c-ntrb4 480 pxc-operator + local pod=percona-xtradb-cluster-operator-58dd9fd94c-ntrb4 + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-58dd9fd94c-ntrb4 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-58dd9fd94c-ntrb4 condition met waiting for pod/percona-xtradb-cluster-operator-58dd9fd94c-ntrb4 to become Ready.Ok + sleep 3 + create_namespace users-17636 + local namespace=users-17636 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v 
'^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + '[' -n '' ']' + desc 'cleaned up old namespaces users-17636' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-17636 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-17636 ++ mktemp + awk '{print$1}' ++ mktemp + local LAST_OUT=/tmp/tmp.ver9FQllVu + local LAST_OUT=/tmp/tmp.HZ14vEtVyh ++ mktemp + local LAST_ERR=/tmp/tmp.oVv8thiGLf + local exit_status=0 ++ mktemp ++ seq 0 2 + local LAST_ERR=/tmp/tmp.cpxThtrdHs + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-17636 + for i in '$(seq 0 2)' + set +e + kubectl get ns + xargs kubectl delete ns + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-17636 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HZ14vEtVyh + cat /tmp/tmp.cpxThtrdHs + rm /tmp/tmp.HZ14vEtVyh /tmp/tmp.cpxThtrdHs + return 0 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-17636 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.ver9FQllVu + cat /tmp/tmp.oVv8thiGLf Error from server (NotFound): namespaces "users-17636" not found + rm /tmp/tmp.ver9FQllVu /tmp/tmp.oVv8thiGLf + return 1 + : + wait_for_delete namespace/users-17636 + local res=namespace/users-17636 + echo -n 'waiting for namespace/users-17636 to be deleted' waiting for namespace/users-17636 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "users-17636" not found + desc 'create namespace users-17636' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-17636 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-17636 ++ mktemp + local LAST_OUT=/tmp/tmp.sRsrvlayFE ++ mktemp + local LAST_ERR=/tmp/tmp.t0ulMZr2nB + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-17636 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.sRsrvlayFE namespace/users-17636 created + cat /tmp/tmp.t0ulMZr2nB + rm /tmp/tmp.sRsrvlayFE /tmp/tmp.t0ulMZr2nB + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.KSLLYvPM1J +++ mktemp ++ local LAST_ERR=/tmp/tmp.PObkb5tvRD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KSLLYvPM1J ++ cat /tmp/tmp.PObkb5tvRD ++ rm /tmp/tmp.KSLLYvPM1J /tmp/tmp.PObkb5tvRD ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2207-89209ce1-16-cluster8 --namespace=users-17636 ++ mktemp + local LAST_OUT=/tmp/tmp.ppIR5a4xGi ++ mktemp + local LAST_ERR=/tmp/tmp.PHoBlVyFvX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2207-89209ce1-16-cluster8 --namespace=users-17636 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ppIR5a4xGi Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2207-89209ce1-16-cluster8" modified. 
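-----------------------------------------------------------------------------------
editor's note: namespace teardown/recreate
-----------------------------------------------------------------------------------
The create_namespace flow above reduces to "delete the namespace if it exists, wait until the API server stops returning it, then create it fresh". A minimal sketch of that flow; the helper name and the --ignore-not-found flag are illustrative substitutes (the trace instead retries the delete through kubectl_bin and swallows the NotFound error with "+ :"):

recreate_namespace() {
	local ns=$1
	kubectl delete namespace "$ns" --ignore-not-found
	echo -n "waiting for namespace/${ns} to be deleted"
	until ! kubectl get namespace "$ns" >/dev/null 2>&1; do   # wait_for_delete equivalent
		echo -n .
		sleep 1
	done
	kubectl create namespace "$ns"
}

recreate_namespace users-17636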
+ cat /tmp/tmp.PHoBlVyFvX + rm /tmp/tmp.ppIR5a4xGi /tmp/tmp.PHoBlVyFvX + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.yH1VmI69VO ++ mktemp + local LAST_ERR=/tmp/tmp.12b266tton + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.yH1VmI69VO secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.12b266tton + rm /tmp/tmp.yH1VmI69VO /tmp/tmp.12b266tton + return 0 + desc 'create PXC cluster with 1-password secret' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster with 1-password secret ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/some-name.yml '' '' /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/secrets_one_pass.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/secrets_one_pass.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/secrets_one_pass.yml ++ mktemp + local LAST_OUT=/tmp/tmp.IH5iG6aCMC ++ mktemp + local LAST_ERR=/tmp/tmp.c8IgQtVxPr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/secrets_one_pass.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.IH5iG6aCMC secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.c8IgQtVxPr + rm /tmp/tmp.IH5iG6aCMC /tmp/tmp.c8IgQtVxPr + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-backup$#image: 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2207-89209ce1#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-17636~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.1KrqS9mfnX ++ mktemp + local LAST_ERR=/tmp/tmp.c3TxSmKg1L + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1KrqS9mfnX deployment.apps/pxc-client created + cat /tmp/tmp.c3TxSmKg1L + rm /tmp/tmp.1KrqS9mfnX /tmp/tmp.c3TxSmKg1L + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + local LAST_OUT=/tmp/tmp.PHtSkQcHjH + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-17636~ + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2207-89209ce1#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_ERR=/tmp/tmp.W38EKIRTdq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PHtSkQcHjH perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.W38EKIRTdq + rm /tmp/tmp.PHtSkQcHjH /tmp/tmp.W38EKIRTdq + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.ZmiU7QYilV ++++ mktemp +++ local 
LAST_ERR=/tmp/tmp.1twiWk65rR +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.ZmiU7QYilV +++ cat /tmp/tmp.1twiWk65rR +++ rm /tmp/tmp.ZmiU7QYilV /tmp/tmp.1twiWk65rR +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.tYl7497Yp2 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.ARkWNDgS1g +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.tYl7497Yp2 +++ cat /tmp/tmp.ARkWNDgS1g +++ rm /tmp/tmp.tYl7497Yp2 /tmp/tmp.ARkWNDgS1g +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-17636 ++ mktemp + local LAST_OUT=/tmp/tmp.dvGIAecMpI ++ mktemp + local LAST_ERR=/tmp/tmp.JIvMd6FIWF + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-17636 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-17636 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-17636 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.dvGIAecMpI + cat /tmp/tmp.JIvMd6FIWF error: no matching resources found + rm /tmp/tmp.dvGIAecMpI /tmp/tmp.JIvMd6FIWF + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ 
egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.FDMLCYXe6C +++ mktemp ++ local LAST_ERR=/tmp/tmp.A9lwVaXcn7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FDMLCYXe6C ++ cat /tmp/tmp.A9lwVaXcn7 ++ rm /tmp/tmp.FDMLCYXe6C /tmp/tmp.A9lwVaXcn7 ++ return 0 + local 'root_pass=Kzxoigfpo'\'';' '-h some-name-pxc -uroot -p'\''o.omG<$.QtJ)9L>po'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''o.omG<$.QtJ)9L>po'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''o.omG<$.QtJ)9L>po'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ty2ORT5Tev +++ mktemp ++ local LAST_ERR=/tmp/tmp.TtS8t98hPM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ty2ORT5Tev ++ cat /tmp/tmp.TtS8t98hPM ++ rm /tmp/tmp.Ty2ORT5Tev /tmp/tmp.TtS8t98hPM ++ return 0 + client_pod=pxc-client-59944c5bbf-nhd97 + wait_pod pxc-client-59944c5bbf-nhd97 + local pod=pxc-client-59944c5bbf-nhd97 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-nhd97 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-nhd97 condition met waiting for pod/pxc-client-59944c5bbf-nhd97 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''o.omG<$.QtJ)9L>po'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''o.omG<$.QtJ)9L>po'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-4-80.sql ]] + 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''o.omG<$.QtJ)9L>po'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''o.omG<$.QtJ)9L>po'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.S5IMa4PonF +++ mktemp ++ local LAST_ERR=/tmp/tmp.OItZo1GYKC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.S5IMa4PonF ++ cat /tmp/tmp.OItZo1GYKC ++ rm /tmp/tmp.S5IMa4PonF /tmp/tmp.OItZo1GYKC ++ return 0 + client_pod=pxc-client-59944c5bbf-nhd97 + wait_pod pxc-client-59944c5bbf-nhd97 + local pod=pxc-client-59944c5bbf-nhd97 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-nhd97 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-nhd97 condition met waiting for pod/pxc-client-59944c5bbf-nhd97 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.WP88OxA47Z/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.WP88OxA47Z/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.2uQMUur4XF +++ mktemp ++ local LAST_ERR=/tmp/tmp.jAQ92S2z3H ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2uQMUur4XF ++ cat /tmp/tmp.jAQ92S2z3H ++ rm /tmp/tmp.2uQMUur4XF /tmp/tmp.jAQ92S2z3H ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.FyCNB4WSLq ++ mktemp + local LAST_ERR=/tmp/tmp.x1PcNmTDK0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.FyCNB4WSLq secret/my-cluster-secrets-2 configured + cat /tmp/tmp.x1PcNmTDK0 Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
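-----------------------------------------------------------------------------------
editor's note: reading passwords out of Secrets
-----------------------------------------------------------------------------------
The getSecretData calls above (my-cluster-secrets/root, and internal-some-name/operator just after) fetch one key from a Secret and base64-decode it. The pattern, exactly as the trace shows it, minus the kubectl_bin retry plumbing:

getSecretData() {
	local secretName=$1
	local dataKey=$2
	kubectl get "secrets/${secretName}" "--template={{.data.${dataKey}}}" | base64 --decode
}

root_pass=$(getSecretData my-cluster-secrets root)   # the root password used to build the run_mysql URIs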
+ rm /tmp/tmp.FyCNB4WSLq /tmp/tmp.x1PcNmTDK0 + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BwWAHKrEfe +++ mktemp ++ local LAST_ERR=/tmp/tmp.zktDGik32Z ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BwWAHKrEfe ++ cat /tmp/tmp.zktDGik32Z ++ rm /tmp/tmp.BwWAHKrEfe /tmp/tmp.zktDGik32Z ++ return 0 + client_pod=pxc-client-59944c5bbf-nhd97 + wait_pod pxc-client-59944c5bbf-nhd97 + local pod=pxc-client-59944c5bbf-nhd97 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-nhd97 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-nhd97 condition met waiting for pod/pxc-client-59944c5bbf-nhd97 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.WP88OxA47Z/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.WP88OxA47Z/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/conf/some-name.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.Neg1Z5s4NK + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2207-89209ce1#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ mktemp + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-17636~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + local LAST_ERR=/tmp/tmp.BWaWsJQYGL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Neg1Z5s4NK perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.BWaWsJQYGL + rm /tmp/tmp.Neg1Z5s4NK /tmp/tmp.BWaWsJQYGL + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X81ITQTRTQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.j4hTQmGhNP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.X81ITQTRTQ ++ cat /tmp/tmp.j4hTQmGhNP ++ rm /tmp/tmp.X81ITQTRTQ /tmp/tmp.j4hTQmGhNP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2N4cmBwpsg +++ mktemp ++ local LAST_ERR=/tmp/tmp.ENkp566EcP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2N4cmBwpsg ++ cat /tmp/tmp.ENkp566EcP ++ rm /tmp/tmp.2N4cmBwpsg /tmp/tmp.ENkp566EcP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cPyXzJ2OR4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.EWo32VP1Kn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cPyXzJ2OR4 ++ cat /tmp/tmp.EWo32VP1Kn ++ rm /tmp/tmp.cPyXzJ2OR4 /tmp/tmp.EWo32VP1Kn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MU08NTln04 +++ mktemp ++ local LAST_ERR=/tmp/tmp.gZvnDa2yrw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MU08NTln04 ++ cat /tmp/tmp.gZvnDa2yrw ++ rm /tmp/tmp.MU08NTln04 /tmp/tmp.gZvnDa2yrw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.h0Viz3fbbv +++ mktemp ++ local LAST_ERR=/tmp/tmp.9D03TE0QhD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.h0Viz3fbbv ++ cat /tmp/tmp.9D03TE0QhD ++ rm /tmp/tmp.h0Viz3fbbv /tmp/tmp.9D03TE0QhD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vZYJYbRunf +++ mktemp ++ local LAST_ERR=/tmp/tmp.f3aMCiZ1OT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vZYJYbRunf ++ cat /tmp/tmp.f3aMCiZ1OT ++ rm /tmp/tmp.vZYJYbRunf /tmp/tmp.f3aMCiZ1OT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4J220T3fO0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.NO3F9aN3I2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4J220T3fO0 ++ cat /tmp/tmp.NO3F9aN3I2 ++ rm /tmp/tmp.4J220T3fO0 /tmp/tmp.NO3F9aN3I2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mRtnmHDrPg +++ mktemp ++ local LAST_ERR=/tmp/tmp.pdoIEiab9G ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mRtnmHDrPg ++ cat /tmp/tmp.pdoIEiab9G ++ rm /tmp/tmp.mRtnmHDrPg /tmp/tmp.pdoIEiab9G ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BFGq7qKoyA +++ mktemp ++ local LAST_ERR=/tmp/tmp.XX0BDmUKPo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BFGq7qKoyA ++ cat /tmp/tmp.XX0BDmUKPo ++ rm /tmp/tmp.BFGq7qKoyA /tmp/tmp.XX0BDmUKPo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0y5tD8L7xS +++ mktemp ++ local LAST_ERR=/tmp/tmp.EfMVgtuc5E ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0y5tD8L7xS ++ cat /tmp/tmp.EfMVgtuc5E ++ rm /tmp/tmp.0y5tD8L7xS /tmp/tmp.EfMVgtuc5E ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.O0n00fbDvF +++ mktemp ++ local LAST_ERR=/tmp/tmp.weXvxxMkcc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.O0n00fbDvF ++ cat /tmp/tmp.weXvxxMkcc ++ rm /tmp/tmp.O0n00fbDvF /tmp/tmp.weXvxxMkcc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4jaaQGOeMk +++ mktemp ++ local LAST_ERR=/tmp/tmp.vU0rzPb9LO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4jaaQGOeMk ++ cat /tmp/tmp.vU0rzPb9LO ++ rm /tmp/tmp.4jaaQGOeMk /tmp/tmp.vU0rzPb9LO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZXAQCpwVKD +++ mktemp ++ local LAST_ERR=/tmp/tmp.UpDSzmx9n5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZXAQCpwVKD ++ cat /tmp/tmp.UpDSzmx9n5 ++ rm /tmp/tmp.ZXAQCpwVKD /tmp/tmp.UpDSzmx9n5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1LmqAQ3n3j +++ mktemp ++ local LAST_ERR=/tmp/tmp.jqMETUXufH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1LmqAQ3n3j ++ cat /tmp/tmp.jqMETUXufH ++ rm /tmp/tmp.1LmqAQ3n3j /tmp/tmp.jqMETUXufH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Kr6bjsb0ti +++ mktemp ++ local LAST_ERR=/tmp/tmp.XDsbqJDoNs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Kr6bjsb0ti ++ cat /tmp/tmp.XDsbqJDoNs ++ rm /tmp/tmp.Kr6bjsb0ti /tmp/tmp.XDsbqJDoNs ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8KGgjx9nJY +++ mktemp ++ local LAST_ERR=/tmp/tmp.0rdNfGRh0k ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8KGgjx9nJY ++ cat /tmp/tmp.0rdNfGRh0k ++ rm /tmp/tmp.8KGgjx9nJY /tmp/tmp.0rdNfGRh0k ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1LjQmSRsKH +++ mktemp ++ local LAST_ERR=/tmp/tmp.sjdL7nwUrr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1LjQmSRsKH ++ cat /tmp/tmp.sjdL7nwUrr ++ rm /tmp/tmp.1LjQmSRsKH /tmp/tmp.sjdL7nwUrr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.h1UCyikPH6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.p1teF8rxUQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.h1UCyikPH6 ++ cat /tmp/tmp.p1teF8rxUQ ++ rm /tmp/tmp.h1UCyikPH6 /tmp/tmp.p1teF8rxUQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ISf7ePpL6R +++ mktemp ++ local LAST_ERR=/tmp/tmp.CwEg0TMDw9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ISf7ePpL6R ++ cat /tmp/tmp.CwEg0TMDw9 ++ rm /tmp/tmp.ISf7ePpL6R /tmp/tmp.CwEg0TMDw9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qPDuSdxDV2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Di1m4pHChf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qPDuSdxDV2 ++ cat /tmp/tmp.Di1m4pHChf ++ rm /tmp/tmp.qPDuSdxDV2 /tmp/tmp.Di1m4pHChf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lQMglqyW7L +++ mktemp ++ local LAST_ERR=/tmp/tmp.slXOEGRMXi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lQMglqyW7L ++ cat /tmp/tmp.slXOEGRMXi ++ rm /tmp/tmp.lQMglqyW7L /tmp/tmp.slXOEGRMXi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TLFnuQYFqF +++ mktemp ++ local LAST_ERR=/tmp/tmp.BzFJ1aHgBE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TLFnuQYFqF ++ cat /tmp/tmp.BzFJ1aHgBE ++ rm /tmp/tmp.TLFnuQYFqF /tmp/tmp.BzFJ1aHgBE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hk8kFQXjHk +++ mktemp ++ local LAST_ERR=/tmp/tmp.SLpVJazF2Y ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hk8kFQXjHk ++ cat /tmp/tmp.SLpVJazF2Y ++ rm /tmp/tmp.hk8kFQXjHk /tmp/tmp.SLpVJazF2Y ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 22 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rFO67gE6SN +++ mktemp ++ local LAST_ERR=/tmp/tmp.umh74P770O ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rFO67gE6SN ++ cat /tmp/tmp.umh74P770O ++ rm /tmp/tmp.rFO67gE6SN /tmp/tmp.umh74P770O ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 23 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3j7kw289a6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.H44pSQIQVK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3j7kw289a6 ++ cat /tmp/tmp.H44pSQIQVK ++ rm /tmp/tmp.3j7kw289a6 /tmp/tmp.H44pSQIQVK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 24 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X8V1tZIMUN +++ mktemp ++ local LAST_ERR=/tmp/tmp.I88Xur8oLN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.X8V1tZIMUN ++ cat /tmp/tmp.I88Xur8oLN ++ rm /tmp/tmp.X8V1tZIMUN /tmp/tmp.I88Xur8oLN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 25 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Iy0yWUFOrL +++ mktemp ++ local LAST_ERR=/tmp/tmp.gwgGcCgA8G ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Iy0yWUFOrL ++ cat /tmp/tmp.gwgGcCgA8G ++ rm /tmp/tmp.Iy0yWUFOrL /tmp/tmp.gwgGcCgA8G ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 26 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hIbOFiybLM +++ mktemp ++ local LAST_ERR=/tmp/tmp.4n8CPipZPN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hIbOFiybLM ++ cat /tmp/tmp.4n8CPipZPN ++ rm /tmp/tmp.hIbOFiybLM /tmp/tmp.4n8CPipZPN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 27 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JjyH5qnN5U +++ mktemp ++ local LAST_ERR=/tmp/tmp.aUPi884cwL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JjyH5qnN5U ++ cat /tmp/tmp.aUPi884cwL ++ rm /tmp/tmp.JjyH5qnN5U /tmp/tmp.aUPi884cwL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 28 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dofPM9RKnF +++ mktemp ++ local LAST_ERR=/tmp/tmp.CpfRce2axz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dofPM9RKnF ++ cat /tmp/tmp.CpfRce2axz ++ rm /tmp/tmp.dofPM9RKnF /tmp/tmp.CpfRce2axz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 29 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vQuAJu4quF +++ mktemp ++ local LAST_ERR=/tmp/tmp.4pXbChG7xu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vQuAJu4quF ++ cat /tmp/tmp.4pXbChG7xu ++ rm /tmp/tmp.vQuAJu4quF /tmp/tmp.4pXbChG7xu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 30 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4neOhlBrck +++ mktemp ++ local LAST_ERR=/tmp/tmp.DDb8uI0FGs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4neOhlBrck ++ cat /tmp/tmp.DDb8uI0FGs ++ rm /tmp/tmp.4neOhlBrck /tmp/tmp.DDb8uI0FGs ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 31 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qV1EYl4paH +++ mktemp ++ local LAST_ERR=/tmp/tmp.IX2gHt4VLl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qV1EYl4paH ++ cat /tmp/tmp.IX2gHt4VLl ++ rm /tmp/tmp.qV1EYl4paH /tmp/tmp.IX2gHt4VLl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 32 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xE4nab2sTF +++ mktemp ++ local LAST_ERR=/tmp/tmp.MmftzCC5aD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xE4nab2sTF ++ cat /tmp/tmp.MmftzCC5aD ++ rm /tmp/tmp.xE4nab2sTF /tmp/tmp.MmftzCC5aD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 33 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wyeTFPwS6E +++ mktemp ++ local LAST_ERR=/tmp/tmp.bPdE6CjRSa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wyeTFPwS6E ++ cat /tmp/tmp.bPdE6CjRSa ++ rm /tmp/tmp.wyeTFPwS6E /tmp/tmp.bPdE6CjRSa ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 34 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.b271R6gc46 +++ mktemp ++ local LAST_ERR=/tmp/tmp.CfBvZ9mvVm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.b271R6gc46 ++ cat /tmp/tmp.CfBvZ9mvVm ++ rm /tmp/tmp.b271R6gc46 /tmp/tmp.CfBvZ9mvVm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 35 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Wd4ulZMUJf +++ mktemp ++ local LAST_ERR=/tmp/tmp.EX4HZx5u9B ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Wd4ulZMUJf ++ cat /tmp/tmp.EX4HZx5u9B ++ rm /tmp/tmp.Wd4ulZMUJf /tmp/tmp.EX4HZx5u9B ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 36 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vL5CcVZ6cR +++ mktemp ++ local LAST_ERR=/tmp/tmp.bGIVnCfVOL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vL5CcVZ6cR ++ cat /tmp/tmp.bGIVnCfVOL ++ rm /tmp/tmp.vL5CcVZ6cR /tmp/tmp.bGIVnCfVOL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 37 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IscWhGy22e +++ mktemp ++ local LAST_ERR=/tmp/tmp.931pmuEHWP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IscWhGy22e ++ cat /tmp/tmp.931pmuEHWP ++ rm /tmp/tmp.IscWhGy22e /tmp/tmp.931pmuEHWP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 38 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qe6a7mLJtc +++ mktemp ++ local LAST_ERR=/tmp/tmp.DYYJprjVQV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qe6a7mLJtc ++ cat /tmp/tmp.DYYJprjVQV ++ rm /tmp/tmp.qe6a7mLJtc /tmp/tmp.DYYJprjVQV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 39 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PmPSYlwOZq +++ mktemp ++ local LAST_ERR=/tmp/tmp.rdybwu7JaC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PmPSYlwOZq ++ cat /tmp/tmp.rdybwu7JaC ++ rm /tmp/tmp.PmPSYlwOZq /tmp/tmp.rdybwu7JaC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 40 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SmBIw3FidK +++ mktemp ++ local LAST_ERR=/tmp/tmp.kYD69LTP3q ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SmBIw3FidK ++ cat /tmp/tmp.kYD69LTP3q ++ rm /tmp/tmp.SmBIw3FidK /tmp/tmp.kYD69LTP3q ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 41 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.a5wsvMu5Mk +++ mktemp ++ local LAST_ERR=/tmp/tmp.sfcGXXOVmM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.a5wsvMu5Mk ++ cat /tmp/tmp.sfcGXXOVmM ++ rm /tmp/tmp.a5wsvMu5Mk /tmp/tmp.sfcGXXOVmM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
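Once .status.state flips to "ready" (which happens a few iterations below), the test does not rely on the state field alone: it also compares the ready replica counts of the PXC and proxy statefulsets against the expected cluster size, after detecting which proxy flavour the CR enables. A sketch of those follow-up checks, with the field paths taken from the trace and the 3/3 sizes this run expects:

    cluster=some-name
    expected=3
    state=$(kubectl get pxc "${cluster}" -o 'jsonpath={.status.state}')
    pxc_ready=$(kubectl get pxc "${cluster}" -o 'jsonpath={.status.pxc.ready}')
    # this run has HAProxy enabled, so the proxy count comes from .status.haproxy
    if [[ "$(kubectl get pxc "${cluster}" -o 'jsonpath={.spec.haproxy.enabled}')" == "true" ]]; then
        proxy_ready=$(kubectl get pxc "${cluster}" -o 'jsonpath={.status.haproxy.ready}')
    fi
    [[ ${state} == ready && ${pxc_ready} == "${expected}" && ${proxy_ready} == "${expected}" ]] \
        && echo "pxc/${cluster} is consistent"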
.+ sleep 5 + [[ 42 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hWvVyA9ToO +++ mktemp ++ local LAST_ERR=/tmp/tmp.hNhbHB0Tr6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hWvVyA9ToO ++ cat /tmp/tmp.hNhbHB0Tr6 ++ rm /tmp/tmp.hWvVyA9ToO /tmp/tmp.hNhbHB0Tr6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 43 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NtR4IiNlqA +++ mktemp ++ local LAST_ERR=/tmp/tmp.stirzRH7hp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NtR4IiNlqA ++ cat /tmp/tmp.stirzRH7hp ++ rm /tmp/tmp.NtR4IiNlqA /tmp/tmp.stirzRH7hp ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oSn0i0fkMQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.PYE6Cewmva ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oSn0i0fkMQ ++ cat /tmp/tmp.PYE6Cewmva ++ rm /tmp/tmp.oSn0i0fkMQ /tmp/tmp.PYE6Cewmva ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.xCpC05b3E9 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.AU5XyMjimI +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.xCpC05b3E9 +++++ cat /tmp/tmp.AU5XyMjimI +++++ rm /tmp/tmp.xCpC05b3E9 /tmp/tmp.AU5XyMjimI +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.l6tqRFSaoI +++ mktemp ++ local LAST_ERR=/tmp/tmp.BHnb8oti43 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.l6tqRFSaoI ++ cat /tmp/tmp.BHnb8oti43 ++ rm /tmp/tmp.l6tqRFSaoI /tmp/tmp.BHnb8oti43 ++ return 0 + [[ 3 == \3 ]] + echo + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UGB8rMAojD +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZNKQRKhVWe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UGB8rMAojD ++ cat /tmp/tmp.ZNKQRKhVWe ++ rm /tmp/tmp.UGB8rMAojD /tmp/tmp.ZNKQRKhVWe ++ return 0 + 
current_generation=1 + [[ 1 != \1 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.QuWJaRq767 ++ mktemp + local LAST_ERR=/tmp/tmp.yKPyhcqm8i + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QuWJaRq767 secret/my-cluster-secrets patched + cat /tmp/tmp.yKPyhcqm8i + rm /tmp/tmp.QuWJaRq767 /tmp/tmp.yKPyhcqm8i + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.o9p1nFZeHP +++ mktemp ++ local LAST_ERR=/tmp/tmp.7a9t8TFB8F ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.o9p1nFZeHP ++ cat /tmp/tmp.7a9t8TFB8F ++ rm /tmp/tmp.o9p1nFZeHP /tmp/tmp.7a9t8TFB8F ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WAYY3i99Kp +++ mktemp ++ local LAST_ERR=/tmp/tmp.m5gChAUZbI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WAYY3i99Kp ++ cat /tmp/tmp.m5gChAUZbI ++ rm /tmp/tmp.WAYY3i99Kp /tmp/tmp.m5gChAUZbI ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.HC0cBXwxHc ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.brSiPIg660 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.HC0cBXwxHc +++++ cat /tmp/tmp.brSiPIg660 +++++ rm /tmp/tmp.HC0cBXwxHc /tmp/tmp.brSiPIg660 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uitWsmTTFH +++ mktemp ++ local LAST_ERR=/tmp/tmp.cPVfpEQa1h ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uitWsmTTFH ++ cat /tmp/tmp.cPVfpEQa1h ++ rm /tmp/tmp.uitWsmTTFH /tmp/tmp.cPVfpEQa1h ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy 
-umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-3-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kJkVNJU7rR +++ mktemp ++ local LAST_ERR=/tmp/tmp.04Kx20eRB9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kJkVNJU7rR ++ cat /tmp/tmp.04Kx20eRB9 ++ rm /tmp/tmp.kJkVNJU7rR /tmp/tmp.04Kx20eRB9 ++ return 0 + client_pod=pxc-client-59944c5bbf-nhd97 + wait_pod pxc-client-59944c5bbf-nhd97 + local pod=pxc-client-59944c5bbf-nhd97 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-nhd97 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-nhd97 condition met waiting for pod/pxc-client-59944c5bbf-nhd97 to become Ready Defaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.WP88OxA47Z/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-3.sql /tmp/tmp.WP88OxA47Z/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0tMfsqy7a0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.xNV2tUfJ4q ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0tMfsqy7a0 ++ cat /tmp/tmp.xNV2tUfJ4q ++ rm /tmp/tmp.0tMfsqy7a0 /tmp/tmp.xNV2tUfJ4q ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + destroy users-17636 + local namespace=users-17636 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v 'the object has been modified' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + grep -v 'get backup status: Job.batch' + sort -u + tee /tmp/tmp.WP88OxA47Z/operator.log + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator + grep -v level=info +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.KCNXExYvQE +++ mktemp ++ local LAST_ERR=/tmp/tmp.cns2CDSUDA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KCNXExYvQE ++ cat /tmp/tmp.cns2CDSUDA ++ rm /tmp/tmp.KCNXExYvQE /tmp/tmp.cns2CDSUDA ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-58dd9fd94c-ntrb4 ++ mktemp + local LAST_OUT=/tmp/tmp.OKQsfXLZ8l ++ mktemp + local LAST_ERR=/tmp/tmp.R1E6sygJdt + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-58dd9fd94c-ntrb4 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.OKQsfXLZ8l + cat /tmp/tmp.R1E6sygJdt + rm /tmp/tmp.OKQsfXLZ8l /tmp/tmp.R1E6sygJdt + return 0 2025-11-06T14:08:54.746Z INFO setup Manager starting up {"gitCommit": "89209ce179be0afd4246cdcc6b564d43f706c45f", "gitBranch": "PR-2207-89209ce1", "buildTime": "2025-11-06T11:54:31Z", "goVersion": "go1.25.4", "os": "linux", "arch": "amd64"} 2025-11-06T14:08:54.746Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.13-gke.1139000"} 2025-11-06T14:08:54.749Z INFO setup Registering Components. 2025-11-06T14:08:55.331Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-11-06T14:08:55.331Z INFO setup Starting the Cmd. 
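The destroy step above collects the operator log through a scrub pipeline whose pieces are interleaved in the trace: known-noisy lines are filtered out, timestamps and limit markers are stripped with sed, the result is de-duplicated, and a copy is kept next to the other test artifacts before being printed below. Reassembled under those assumptions (the pod lookup uses the selector from the trace; the output path is a placeholder):

    operator_pod=$(kubectl get pods -n pxc-operator \
        --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator \
        -o 'jsonpath={.items[].metadata.name}')
    kubectl logs -n pxc-operator "${operator_pod}" \
        | grep -v 'the object has been modified' \
        | grep -v 'get backup status: Job.batch' \
        | grep -v level=info \
        | /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
        | sort -u \
        | tee /tmp/operator.log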
2025-11-06T14:08:55.332Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-11-06T14:08:55.332Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-11-06T14:08:55.332Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-11-06T14:08:55.332Z INFO controller-runtime.metrics Starting metrics server 2025-11-06T14:08:55.332Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-11-06T14:08:55.332Z INFO controller-runtime.webhook Starting webhook server 2025-11-06T14:08:55.332Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-11-06T14:08:55.433Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 2025-11-06T14:08:55.463Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-11-06T14:08:55.463Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-11-06T14:08:55.463Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.Secret"} 2025-11-06T14:08:55.463Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-11-06T14:08:55.463Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2025-11-06T14:08:55.464Z DEBUG events percona-xtradb-cluster-operator-58dd9fd94c-ntrb4_27dfb165-37eb-4d59-9d69-0e1039338fe9 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"afda66cb-4716-4c16-b82e-3c0071ee9948","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1762438135456751009"}, "reason": "LeaderElection"} 2025-11-06T14:08:55.564Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2025-11-06T14:08:55.564Z INFO Starting Controller {"controller": "pxc-controller"} 2025-11-06T14:08:55.564Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2025-11-06T14:08:55.564Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2025-11-06T14:08:55.564Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2025-11-06T14:08:55.564Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2025-11-06T14:09:36.926Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "83b39393-dab5-4039-b817-5a4dca2ce0f4", "version": "1.19.0"} 2025-11-06T14:09:37.204Z INFO User secrets updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "83b39393-dab5-4039-b817-5a4dca2ce0f4", "secrets": "my-cluster-secrets"} 2025-11-06T14:09:37.421Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "83b39393-dab5-4039-b817-5a4dca2ce0f4", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-11-06T14:09:37.440Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "83b39393-dab5-4039-b817-5a4dca2ce0f4", "object": "auto-some-name-pxc", "kind": 
"&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-11-06T14:09:38.015Z ERROR Reconciler error {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "83b39393-dab5-4039-b817-5a4dca2ce0f4", "error": "failed to deploy proxysql: updatePod for proxysql: reconcile config: reconcile autotune config: create or update configmap: configmaps \"auto-some-name-pxc\" already exists", "errorVerbose": "configmaps \"auto-some-name-pxc\" already exists\ncreate or update configmap\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileAutotuneConfigMap\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:92\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:25\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nreconcile autotune 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:27\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nreconcile 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:54\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nupdatePod for proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:578\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nfailed to deploy 
proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:596\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-06T14:09:38.132Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "25efdb37-4483-4322-81be-1b152eebd178", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-06T14:09:38.183Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "25efdb37-4483-4322-81be-1b152eebd178", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-06T14:09:38.244Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "25efdb37-4483-4322-81be-1b152eebd178", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-06T14:09:38.275Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "25efdb37-4483-4322-81be-1b152eebd178", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-06T14:09:38.352Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "25efdb37-4483-4322-81be-1b152eebd178", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-06T14:09:38.457Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "25efdb37-4483-4322-81be-1b152eebd178", "object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-06T14:09:39.434Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "d0b7d076-cb0b-4f91-aaa2-37e2c6ecd8bd", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-11-06T14:09:39.453Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "d0b7d076-cb0b-4f91-aaa2-37e2c6ecd8bd", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-11-06T14:10:56.546Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-17636", "name": 
"some-name", "reconcileID": "b9a98245-b763-4556-98d1-62bdc2c8a30a", "user": "operator"} 2025-11-06T14:10:56.595Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "b9a98245-b763-4556-98d1-62bdc2c8a30a", "user": "monitor"} 2025-11-06T14:10:56.645Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "b9a98245-b763-4556-98d1-62bdc2c8a30a"} 2025-11-06T14:10:56.678Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "b9a98245-b763-4556-98d1-62bdc2c8a30a"} 2025-11-06T14:10:56.709Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "b9a98245-b763-4556-98d1-62bdc2c8a30a", "user": "xtrabackup"} 2025-11-06T14:10:56.753Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "b9a98245-b763-4556-98d1-62bdc2c8a30a"} 2025-11-06T14:10:56.784Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "b9a98245-b763-4556-98d1-62bdc2c8a30a", "user": "replication"} 2025-11-06T14:10:56.792Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "b9a98245-b763-4556-98d1-62bdc2c8a30a", "err": "get primary pxc pod: not found"} 2025-11-06T14:11:01.511Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "17c1f09d-9c41-4135-b310-d25aefc5abf9", "err": "get primary pxc pod: not found"} 2025-11-06T14:11:06.685Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "0592aad4-828e-4669-9e6d-f920f394e935", "err": "get primary pxc pod: not found"} 2025-11-06T14:13:28.383Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "3e17aed6-83e1-440d-9bdb-299a9ad8bcb8", "user": "root"} 2025-11-06T14:13:28.521Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "3e17aed6-83e1-440d-9bdb-299a9ad8bcb8", "new version": "8.0.43-34.1"} 2025-11-06T14:13:30.352Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "3e17aed6-83e1-440d-9bdb-299a9ad8bcb8"} 2025-11-06T14:13:35.131Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c1ed1793-cff2-49fa-99a9-b7c2a5e4939e"} 2025-11-06T14:13:40.456Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "6281dfa1-f4a7-40a4-92e0-fb675be5c46b"} 2025-11-06T14:13:45.961Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "cbe088c6-791c-41cb-b6b3-ab5ec928f552"} 2025-11-06T14:13:51.607Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "eeed42be-f47c-4770-afca-06585a9ab534"} 2025-11-06T14:13:56.347Z DEBUG PXC 
users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "46740293-cb7b-4fe2-940b-5d009723c88d"} 2025-11-06T14:14:01.931Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "4dd8fa81-e0af-4d26-bc53-ea54f1b2afa7"} 2025-11-06T14:14:07.149Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "7331c16f-6107-407a-b84a-f349cf065230"} 2025-11-06T14:14:12.927Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "71296f31-57d4-40e8-b222-b537d27fa07a"} 2025-11-06T14:14:18.203Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "75d30bcc-a03d-484e-8756-b2915b55bf8f"} 2025-11-06T14:14:23.752Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "26c2ad49-4a2b-49aa-aefd-01d29549a736"} 2025-11-06T14:14:29.055Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "b0e92c9f-cb85-4448-906c-c9d2990413a2"} 2025-11-06T14:14:34.458Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "66d9f682-1e91-48f8-acc5-ec01d8151513"} 2025-11-06T14:14:39.815Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "9511f479-9d79-404f-b69c-639030939028"} 2025-11-06T14:14:45.054Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "82ab6948-4efe-448c-80d2-15d6f0c79b2f"} 2025-11-06T14:14:50.506Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "9e97bba1-afb7-4f02-8986-c9b39d30df20"} 2025-11-06T14:14:55.532Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "8a44f7c2-6d3d-4826-b6aa-f1fc92c7c1ba"} 2025-11-06T14:14:56.682Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "3db77f2d-abba-4ee3-90de-6acd44823a61", "user": "root"} 2025-11-06T14:14:56.703Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "3db77f2d-abba-4ee3-90de-6acd44823a61", "user": "root"} 2025-11-06T14:14:56.726Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "3db77f2d-abba-4ee3-90de-6acd44823a61", "secret": "some-name-mysql-init", "user": "root"} 2025-11-06T14:14:59.381Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "3db77f2d-abba-4ee3-90de-6acd44823a61"} 2025-11-06T14:14:59.404Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "3db77f2d-abba-4ee3-90de-6acd44823a61", "user": "root"} 2025-11-06T14:14:59.425Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", 
"reconcileID": "3db77f2d-abba-4ee3-90de-6acd44823a61", "user": "root"} 2025-11-06T14:15:01.324Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "3db77f2d-abba-4ee3-90de-6acd44823a61"} 2025-11-06T14:15:06.861Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "038365a5-462b-444d-9c46-04ed37686919"} 2025-11-06T14:15:11.958Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "12b0f9e9-7b55-4dad-90ac-7479dc034760"} 2025-11-06T14:15:17.119Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "45d04bfb-6c95-4f28-9c05-075448fa11ba", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-06T14:15:17.178Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "45d04bfb-6c95-4f28-9c05-075448fa11ba", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-06T14:15:17.381Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "0d34433a-2e7e-4e1a-9f2f-3d6bb472ad86"} 2025-11-06T14:15:37.910Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "04519df3-fe5b-49fe-b0d3-4b661748fc2c", "err": "get primary pxc pod: not found"} 2025-11-06T14:15:42.112Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "04519df3-fe5b-49fe-b0d3-4b661748fc2c", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in 
ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-06T14:15:42.259Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "7030f80b-f4da-4c23-a7eb-85b1afbd6837", "user": "proxyadmin"} 2025-11-06T14:15:42.259Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "7030f80b-f4da-4c23-a7eb-85b1afbd6837", "user": "proxyadmin"} 2025-11-06T14:15:42.289Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "7030f80b-f4da-4c23-a7eb-85b1afbd6837", "user": "proxyadmin"} 2025-11-06T14:15:42.309Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "7030f80b-f4da-4c23-a7eb-85b1afbd6837", "user": "proxyadmin"} 2025-11-06T14:15:42.309Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "7030f80b-f4da-4c23-a7eb-85b1afbd6837", "last-applied-secret": "f1f3421f368c01fae6484625d500ced346c1397364f6880530474c03a11ba260"} 2025-11-06T14:15:42.313Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "7030f80b-f4da-4c23-a7eb-85b1afbd6837", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-06T14:15:42.374Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "7030f80b-f4da-4c23-a7eb-85b1afbd6837", "err": "get primary pxc pod: not found"} 2025-11-06T14:15:44.093Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "7030f80b-f4da-4c23-a7eb-85b1afbd6837", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-06T14:16:23.478Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "7dd9b3bd-d241-4058-9780-e3fc35e0b0b6"} 2025-11-06T14:16:27.978Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "8afa7ae7-8ad7-461b-a9d6-67ecb9993f5a"} 2025-11-06T14:16:28.879Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "72c919fc-c92e-446e-bc39-70f5bc9d05c5", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-06T14:16:28.933Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "72c919fc-c92e-446e-bc39-70f5bc9d05c5", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-06T14:16:30.423Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "61b408af-8b1a-496c-a09c-495e346deaa5", "user": "xtrabackup"} 2025-11-06T14:16:30.436Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "61b408af-8b1a-496c-a09c-495e346deaa5", "user": "xtrabackup"} 2025-11-06T14:16:30.458Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "61b408af-8b1a-496c-a09c-495e346deaa5", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-11-06T14:16:30.477Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "61b408af-8b1a-496c-a09c-495e346deaa5", "user": "xtrabackup"} 2025-11-06T14:16:30.491Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "61b408af-8b1a-496c-a09c-495e346deaa5", "user": "xtrabackup"} 2025-11-06T14:16:30.497Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "61b408af-8b1a-496c-a09c-495e346deaa5", "last-applied-secret": "0a4f82fb5bd65d8d78c50d7cba6645f81bdb0e5e22e3448a4bfde18df16501bf"} 2025-11-06T14:16:30.500Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "61b408af-8b1a-496c-a09c-495e346deaa5", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-06T14:16:31.207Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": 
"72c919fc-c92e-446e-bc39-70f5bc9d05c5"} 2025-11-06T14:17:19.367Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "0ff4bdce-1718-41f5-b227-7a779ae8ff03", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-17636 on 34.118.224.10:53: no such host"} 2025-11-06T14:17:29.113Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "68f7af96-c89d-4f23-9091-80709ca61f16", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: invalid connection"} 2025-11-06T14:18:17.383Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "672d2a60-4390-4e98-89e9-0f80335f6326", "err": "failed to connect to pod some-name-pxc-0: invalid connection"} 2025-11-06T14:18:22.552Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "73d89893-dfbc-49a6-ba64-96e797b7105e", "primary name": "some-name-pxc-0.some-name-pxc.users-17636.svc.cluster.local"} 2025-11-06T14:18:27.705Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "73b2c8bf-d705-4258-b2de-9b1736b2b3fe", "primary name": "some-name-pxc-0.some-name-pxc.users-17636.svc.cluster.local"} 2025-11-06T14:18:32.882Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "31ed979b-20d5-480c-85c4-0ae00669f41e", "primary name": "some-name-pxc-0.some-name-pxc.users-17636.svc.cluster.local"} 2025-11-06T14:18:38.016Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "e23bb92f-9333-4d7a-b8ac-d466e3fd45c2", "primary name": "some-name-pxc-0.some-name-pxc.users-17636.svc.cluster.local"} 2025-11-06T14:18:43.148Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "2e5242a7-efb6-46a0-881a-168244aa092c", "primary name": "some-name-pxc-0.some-name-pxc.users-17636.svc.cluster.local"} 2025-11-06T14:18:48.290Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "94b1f8f8-4845-4044-a123-a9e6c1e295df", "primary name": "some-name-pxc-0.some-name-pxc.users-17636.svc.cluster.local"} 2025-11-06T14:18:55.885Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "12f3b877-07ef-44c0-9ec9-a8033f164d39"} 2025-11-06T14:19:01.096Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c191ea16-ddde-4961-937f-a4bcaa196449"} 2025-11-06T14:19:03.008Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "1b4bf480-2470-4cfe-b7e8-927bbec00166", "user": "monitor"} 2025-11-06T14:19:03.023Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "1b4bf480-2470-4cfe-b7e8-927bbec00166", "user": "monitor"} 2025-11-06T14:19:03.049Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "1b4bf480-2470-4cfe-b7e8-927bbec00166", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-06T14:19:03.068Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "1b4bf480-2470-4cfe-b7e8-927bbec00166", "user": "monitor"} 2025-11-06T14:19:03.111Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "1b4bf480-2470-4cfe-b7e8-927bbec00166", "user": "monitor"} 2025-11-06T14:19:03.404Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "1b4bf480-2470-4cfe-b7e8-927bbec00166", "last-applied-secret": "9906a036bb2051c30696b6b402277dc91da0df6526622a31b677488f6cdeca69"} 2025-11-06T14:19:03.408Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "1b4bf480-2470-4cfe-b7e8-927bbec00166", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-06T14:19:06.475Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "1b4bf480-2470-4cfe-b7e8-927bbec00166", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-06T14:19:52.458Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "469940e1-21e1-4a7e-b31b-7bfc695078d3", "user": "monitor"} 
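The monitor rotation recorded in these entries was triggered earlier in the test by patching the users secret; the operator then notices the change, updates the user in MySQL and ProxySQL, and restarts the proxy pods. The patch itself is a one-liner, reassembled from the earlier trace (the decoded value is the test-password2 literal this run uses; any system-user key under .data follows the same pattern):

    value=$(echo -n 'test-password2' | base64)   # dGVzdC1wYXNzd29yZDI=, as in the trace
    kubectl patch secret my-cluster-secrets \
        -p "{\"data\":{\"monitor\": \"${value}\"}}"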
2025-11-06T14:19:54.390Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "469940e1-21e1-4a7e-b31b-7bfc695078d3"}
2025-11-06T14:19:57.458Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "b9eadfc5-6b73-4fa5-b306-dc1d31b3a996", "user": "monitor"}
2025-11-06T14:19:59.547Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "b9eadfc5-6b73-4fa5-b306-dc1d31b3a996"}
2025-11-06T14:20:03.073Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "b1941c0c-cebb-4117-bc44-8315e531f672", "user": "monitor"}
2025-11-06T14:20:05.158Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "b1941c0c-cebb-4117-bc44-8315e531f672"}
2025-11-06T14:20:08.672Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "82e60a6b-adce-4ccf-a8cf-d3edab39bdff", "user": "monitor"}
2025-11-06T14:20:10.766Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "82e60a6b-adce-4ccf-a8cf-d3edab39bdff"}
2025-11-06T14:20:14.238Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "8bb45315-9cbb-43aa-93fa-d78deea14716", "user": "monitor"}
2025-11-06T14:20:15.103Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "8bb45315-9cbb-43aa-93fa-d78deea14716", "user": "monitor"}
2025-11-06T14:20:15.121Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "8bb45315-9cbb-43aa-93fa-d78deea14716", "last-applied-secret": "9906a036bb2051c30696b6b402277dc91da0df6526622a31b677488f6cdeca69"}
2025-11-06T14:20:16.869Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "8bb45315-9cbb-43aa-93fa-d78deea14716"}
2025-11-06T14:20:22.974Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c49e538a-8a3a-4643-bcc8-a42c10a94bf3"}
2025-11-06T14:20:28.456Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "9054d166-c91e-48b2-a9a9-be9573f25284"}
2025-11-06T14:20:33.695Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "b2f27d68-bbad-423f-9d0f-4d3fda3d685e"}
2025-11-06T14:20:39.258Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "df7022e6-2f47-49de-8c9b-b8146ef08f45"}
2025-11-06T14:20:41.337Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c0573b7a-d42a-4275-9808-8bec77c97b3b", "user": "operator"}
2025-11-06T14:20:41.351Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c0573b7a-d42a-4275-9808-8bec77c97b3b", "user": "operator"}
2025-11-06T14:20:41.368Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c0573b7a-d42a-4275-9808-8bec77c97b3b", "secret": "some-name-mysql-init", "user": "operator"}
2025-11-06T14:20:41.384Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c0573b7a-d42a-4275-9808-8bec77c97b3b", "user": "operator"}
2025-11-06T14:20:41.399Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c0573b7a-d42a-4275-9808-8bec77c97b3b", "user": "operator"}
2025-11-06T14:20:41.419Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c0573b7a-d42a-4275-9808-8bec77c97b3b", "last-applied-secret": "58bc9af220b124fafe72ea91485722e92f278159e42dbc408e241ea631424562"}
2025-11-06T14:20:41.423Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c0573b7a-d42a-4275-9808-8bec77c97b3b", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-06T14:20:44.927Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c0573b7a-d42a-4275-9808-8bec77c97b3b", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-06T14:21:21.261Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "4b457f06-c325-4d1d-a238-0c13448c8e2a"}
2025-11-06T14:21:25.346Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "d756b922-5588-4b98-9b35-08c08335e4d6"}
2025-11-06T14:21:30.703Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "49836e51-51f3-429e-9837-8769157fe733"}
2025-11-06T14:21:36.329Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "3e7902d3-a0ad-4359-b830-c28db1e9be82"}
2025-11-06T14:21:38.020Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "secrets": "my-cluster-secrets-2"}
2025-11-06T14:21:38.028Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "user": "root"}
2025-11-06T14:21:38.059Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "user": "root"}
2025-11-06T14:21:38.078Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "secret": "some-name-mysql-init", "user": "root"}
2025-11-06T14:21:40.557Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362"}
2025-11-06T14:21:40.579Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "user": "root"}
2025-11-06T14:21:40.601Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "user": "root"}
2025-11-06T14:21:40.608Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "user": "operator"}
2025-11-06T14:21:40.623Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "user": "operator"}
2025-11-06T14:21:40.640Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "secret": "some-name-mysql-init", "user": "operator"}
2025-11-06T14:21:40.659Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "user": "operator"}
2025-11-06T14:21:40.672Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "user": "operator"}
2025-11-06T14:21:40.679Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "user": "monitor"}
2025-11-06T14:21:40.695Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "user": "monitor"}
2025-11-06T14:21:40.720Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "secret": "some-name-mysql-init", "user": "monitor"}
2025-11-06T14:21:40.740Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "user": "monitor"}
2025-11-06T14:21:40.770Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "user": "monitor"}
2025-11-06T14:21:41.059Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "user": "xtrabackup"}
2025-11-06T14:21:41.075Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "user": "xtrabackup"}
2025-11-06T14:21:41.093Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2025-11-06T14:21:41.114Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "user": "xtrabackup"}
2025-11-06T14:21:41.128Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "user": "xtrabackup"}
2025-11-06T14:21:41.136Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "user": "replication"}
2025-11-06T14:21:41.150Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "user": "replication"}
2025-11-06T14:21:41.169Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "secret": "some-name-mysql-init", "user": "replication"}
2025-11-06T14:21:41.189Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "user": "replication"}
2025-11-06T14:21:41.204Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "user": "replication"}
2025-11-06T14:21:41.204Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "user": "proxyadmin"}
2025-11-06T14:21:41.224Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "user": "proxyadmin"}
2025-11-06T14:21:41.249Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "user": "proxyadmin"}
2025-11-06T14:21:41.249Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "last-applied-secret": "dfbacc85608362120c47cf3fea3c325437200a3f2f47a3fcc46addb717371144"}
2025-11-06T14:21:41.249Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "last-applied-secret": "dfbacc85608362120c47cf3fea3c325437200a3f2f47a3fcc46addb717371144"}
2025-11-06T14:21:41.252Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-06T14:21:41.305Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-06T14:21:43.219Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c261a530-fb99-4e28-ba0f-a41155573362", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-06T14:22:34.737Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "bcdf04e9-036f-45c8-a76b-8aad89839854", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-17636 on 34.118.224.10:53: no such host"}
2025-11-06T14:22:39.747Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "976d4402-80d2-4c1f-ab78-ecae41b6941f", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-17636 on 34.118.224.10:53: no such host"}
2025-11-06T14:22:45.012Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "ca1751e2-9666-41d7-b8b8-376f35537180", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: invalid connection"}
2025-11-06T14:23:33.499Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "7d1a2b4d-ff7c-44fc-bd28-d98a9e79e171", "err": "failed to connect to pod some-name-pxc-0: invalid connection"}
2025-11-06T14:23:38.709Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "018d1d30-2b03-43a1-bf6f-6b9fee549946", "primary name": "some-name-pxc-0.some-name-pxc.users-17636.svc.cluster.local"}
2025-11-06T14:23:43.867Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "0c13d580-39a5-4acb-992e-af0dddad9f43", "primary name": "some-name-pxc-0.some-name-pxc.users-17636.svc.cluster.local"}
2025-11-06T14:23:49.011Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "9ef2ac0b-43b5-4506-82d8-990c0575b4de", "primary name": "some-name-pxc-0.some-name-pxc.users-17636.svc.cluster.local"}
2025-11-06T14:23:54.187Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "b93635b1-d432-4a6c-a59b-745680a87209", "primary name": "some-name-pxc-0.some-name-pxc.users-17636.svc.cluster.local"}
2025-11-06T14:23:59.326Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "8eb1879b-1d58-4851-93dc-7a60726c38ac", "primary name": "some-name-pxc-0.some-name-pxc.users-17636.svc.cluster.local"}
2025-11-06T14:24:04.469Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "6fb39608-ab58-409a-aaf7-e361bcbedb9d", "primary name": "some-name-pxc-0.some-name-pxc.users-17636.svc.cluster.local"}
2025-11-06T14:24:09.632Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "2e017047-3c3a-4e52-8b5f-3761b03a9541", "primary name": "some-name-pxc-0.some-name-pxc.users-17636.svc.cluster.local"}
2025-11-06T14:24:14.789Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "be156ae1-2982-4f45-bcfa-75adc47340b9", "primary name": "some-name-pxc-0.some-name-pxc.users-17636.svc.cluster.local"}
2025-11-06T14:24:21.206Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "e4d3a9b7-b049-41b9-94ee-98fbaff6e577", "user": "monitor"}
2025-11-06T14:24:22.083Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "e4d3a9b7-b049-41b9-94ee-98fbaff6e577", "user": "monitor"}
2025-11-06T14:24:22.097Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "e4d3a9b7-b049-41b9-94ee-98fbaff6e577", "last-applied-secret": "dfbacc85608362120c47cf3fea3c325437200a3f2f47a3fcc46addb717371144"}
2025-11-06T14:24:23.902Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "e4d3a9b7-b049-41b9-94ee-98fbaff6e577"}
2025-11-06T14:24:26.741Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "8032026d-65ab-4a31-9ab0-6599eac0d900", "user": "operator"}
2025-11-06T14:24:26.754Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "8032026d-65ab-4a31-9ab0-6599eac0d900", "user": "operator"}
2025-11-06T14:24:26.778Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "8032026d-65ab-4a31-9ab0-6599eac0d900", "secret": "some-name-mysql-init", "user": "operator"}
2025-11-06T14:24:26.803Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "8032026d-65ab-4a31-9ab0-6599eac0d900", "user": "operator"}
2025-11-06T14:24:26.817Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "8032026d-65ab-4a31-9ab0-6599eac0d900", "user": "operator"}
2025-11-06T14:24:26.835Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "8032026d-65ab-4a31-9ab0-6599eac0d900", "last-applied-secret": "5310a213c2afe2dda428b5e5ab99467072fe5c849c0813bc5b0deac0e910c1e2"}
2025-11-06T14:24:26.839Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "8032026d-65ab-4a31-9ab0-6599eac0d900", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-06T14:24:28.519Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "fe091b6d-03f3-4e46-9dda-fcf9aee20bab", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-06T14:24:34.674Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "e41cb554-6b32-478c-b37e-f816905497b1", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-06T14:25:04.012Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "f519eb4b-428d-44f2-809b-9e9fbad04186"}
2025-11-06T14:25:08.405Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "db5dfe06-2aa2-46e0-9dee-9428c2713397"}
2025-11-06T14:25:13.713Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "f0975712-6184-4da7-94fb-9a0b7477b1e0"}
2025-11-06T14:25:19.087Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "4f086535-c656-4959-bba8-ae5d82f3afbe"}
2025-11-06T14:25:24.331Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "ee39aaee-fbdd-47d4-a9a9-e87c002683ff"}
2025-11-06T14:25:29.627Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "f4bd6fc7-59dc-4543-9d5c-32f8291a48c0"}
2025-11-06T14:25:35.548Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "02666c34-d44b-448c-8310-785bc0ff4f83"}
2025-11-06T14:25:40.429Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "8008a37e-0f30-45a3-94b9-470083145044"}
2025-11-06T14:25:45.725Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "5790596d-8a17-486b-b924-4eff69de95d4"}
2025-11-06T14:25:51.685Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "7d6075eb-93b3-4a9b-ac9c-063eb2087d56"}
2025-11-06T14:25:56.384Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "20a0b164-f790-4b66-aad7-df118e9e90c8"}
2025-11-06T14:26:01.482Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "1f75d0d6-40bf-468a-a3b7-3a5e288c9fcc"}
2025-11-06T14:26:07.610Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "327e9705-536d-44a1-853a-16af79496c19"}
2025-11-06T14:26:13.326Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "edc39919-48df-4bfd-b5be-52353b641cd8"}
2025-11-06T14:26:17.116Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "472696c9-a6b8-421e-9d39-0c7221a6b2a8", "user": "root"}
2025-11-06T14:26:17.137Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "472696c9-a6b8-421e-9d39-0c7221a6b2a8", "user": "root"}
2025-11-06T14:26:17.164Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "472696c9-a6b8-421e-9d39-0c7221a6b2a8", "secret": "some-name-mysql-init", "user": "root"}
2025-11-06T14:26:19.797Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "472696c9-a6b8-421e-9d39-0c7221a6b2a8"}
2025-11-06T14:26:19.829Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "472696c9-a6b8-421e-9d39-0c7221a6b2a8", "user": "root"}
2025-11-06T14:26:19.852Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "472696c9-a6b8-421e-9d39-0c7221a6b2a8", "user": "root"}
2025-11-06T14:26:19.867Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "472696c9-a6b8-421e-9d39-0c7221a6b2a8", "user": "monitor"}
2025-11-06T14:26:19.880Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "472696c9-a6b8-421e-9d39-0c7221a6b2a8", "user": "monitor"}
2025-11-06T14:26:19.900Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "472696c9-a6b8-421e-9d39-0c7221a6b2a8", "secret": "some-name-mysql-init", "user": "monitor"}
2025-11-06T14:26:19.919Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "472696c9-a6b8-421e-9d39-0c7221a6b2a8", "user": "monitor"}
2025-11-06T14:26:19.943Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "472696c9-a6b8-421e-9d39-0c7221a6b2a8", "user": "monitor"}
2025-11-06T14:26:20.240Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "472696c9-a6b8-421e-9d39-0c7221a6b2a8", "user": "xtrabackup"}
2025-11-06T14:26:20.252Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "472696c9-a6b8-421e-9d39-0c7221a6b2a8", "user": "xtrabackup"}
2025-11-06T14:26:20.292Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "472696c9-a6b8-421e-9d39-0c7221a6b2a8", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2025-11-06T14:26:20.317Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "472696c9-a6b8-421e-9d39-0c7221a6b2a8", "user": "xtrabackup"}
2025-11-06T14:26:20.332Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "472696c9-a6b8-421e-9d39-0c7221a6b2a8", "user": "xtrabackup"}
2025-11-06T14:26:20.340Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "472696c9-a6b8-421e-9d39-0c7221a6b2a8", "user": "proxyadmin"}
2025-11-06T14:26:20.358Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "472696c9-a6b8-421e-9d39-0c7221a6b2a8", "user": "proxyadmin"}
2025-11-06T14:26:20.480Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "472696c9-a6b8-421e-9d39-0c7221a6b2a8", "user": "proxyadmin"}
2025-11-06T14:26:20.480Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "472696c9-a6b8-421e-9d39-0c7221a6b2a8", "last-applied-secret": "54d0f0ca7684fd7baaa3b3dfe32fe736bdfbf34ad3e556a05dbe2edd2a4f646c"}
2025-11-06T14:26:20.480Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "472696c9-a6b8-421e-9d39-0c7221a6b2a8", "last-applied-secret": "54d0f0ca7684fd7baaa3b3dfe32fe736bdfbf34ad3e556a05dbe2edd2a4f646c"}
2025-11-06T14:26:20.483Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "472696c9-a6b8-421e-9d39-0c7221a6b2a8", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-06T14:26:21.033Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "472696c9-a6b8-421e-9d39-0c7221a6b2a8", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-06T14:26:23.378Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "472696c9-a6b8-421e-9d39-0c7221a6b2a8", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-06T14:27:15.585Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "16f63148-9d14-4723-9fa7-2dc0d3371dbe", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-17636 on 34.118.224.10:53: no such host"}
2025-11-06T14:27:20.104Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "a025e3ef-f118-49ca-906e-3af97b0d5667", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-17636 on 34.118.224.10:53: no such host"}
2025-11-06T14:28:08.194Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "f0f09c46-81fc-4c2f-a240-818d414ed6c2", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: invalid connection"}
2025-11-06T14:28:13.396Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "1c28822b-db94-46a8-82d1-e8bcbea4c43b", "primary name": "some-name-pxc-0.some-name-pxc.users-17636.svc.cluster.local"}
2025-11-06T14:28:18.541Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "5d82d9d2-d909-499b-b7cf-9276b2a7f08e", "primary name": "some-name-pxc-0.some-name-pxc.users-17636.svc.cluster.local"}
2025-11-06T14:28:23.716Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "098428a7-5e3f-4cdb-b680-55e8456fd25d", "primary name": "some-name-pxc-0.some-name-pxc.users-17636.svc.cluster.local"}
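The repeated "sync users" failures in this stretch line up with the proxysql StatefulSet being rolled for the new proxyadmin credentials: syncusers is executed inside a pod whose container is gone or still starting. A quick way to watch the container come back during the rollout (a sketch; pod and namespace names are taken from this log, and the jsonpath assumes the container is named proxysql):

  kubectl -n users-17636 get pod some-name-proxysql-0 \
    -o jsonpath='{.status.containerStatuses[?(@.name=="proxysql")].ready}'
  # prints "true" once the container is recreated and passing its readiness probe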
2025-11-06T14:28:28.870Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "143091c3-0f23-44bf-8d7c-3997e49d72f6", "primary name": "some-name-pxc-0.some-name-pxc.users-17636.svc.cluster.local"}
2025-11-06T14:28:34.022Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c2ce006a-0a3b-479b-b2c6-d3c916efe3f6", "primary name": "some-name-pxc-0.some-name-pxc.users-17636.svc.cluster.local"}
2025-11-06T14:28:39.201Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "b4138a39-8e06-4952-a4d2-0e83783f7399", "primary name": "some-name-pxc-0.some-name-pxc.users-17636.svc.cluster.local"}
2025-11-06T14:28:44.371Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "6e458d77-5fc3-4214-8b89-8079557cf8d5", "primary name": "some-name-pxc-0.some-name-pxc.users-17636.svc.cluster.local"}
2025-11-06T14:28:49.504Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "36b1de6b-ff32-40b7-9ba0-d64c22ef71e2", "primary name": "some-name-pxc-0.some-name-pxc.users-17636.svc.cluster.local"}
2025-11-06T14:28:56.054Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "3c87f380-9796-4f2b-8a77-e87c926dba16", "user": "monitor"}
2025-11-06T14:28:56.737Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "3c87f380-9796-4f2b-8a77-e87c926dba16", "user": "monitor"}
2025-11-06T14:28:56.751Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "3c87f380-9796-4f2b-8a77-e87c926dba16", "last-applied-secret": "54d0f0ca7684fd7baaa3b3dfe32fe736bdfbf34ad3e556a05dbe2edd2a4f646c"}
2025-11-06T14:28:57.149Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c99d5dd8-4a67-4917-a584-560ee2cd2c1c", "user": "root"}
2025-11-06T14:28:57.168Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c99d5dd8-4a67-4917-a584-560ee2cd2c1c", "user": "root"}
2025-11-06T14:28:57.189Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c99d5dd8-4a67-4917-a584-560ee2cd2c1c", "secret": "some-name-mysql-init", "user": "root"}
2025-11-06T14:28:57.211Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c99d5dd8-4a67-4917-a584-560ee2cd2c1c", "user": "root"}
2025-11-06T14:28:57.231Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c99d5dd8-4a67-4917-a584-560ee2cd2c1c", "user": "root"}
2025-11-06T14:28:57.240Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c99d5dd8-4a67-4917-a584-560ee2cd2c1c", "user": "operator"}
2025-11-06T14:28:57.252Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c99d5dd8-4a67-4917-a584-560ee2cd2c1c", "user": "operator"}
2025-11-06T14:28:57.273Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c99d5dd8-4a67-4917-a584-560ee2cd2c1c", "secret": "some-name-mysql-init", "user": "operator"}
2025-11-06T14:28:57.310Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c99d5dd8-4a67-4917-a584-560ee2cd2c1c", "user": "operator"}
2025-11-06T14:28:57.322Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c99d5dd8-4a67-4917-a584-560ee2cd2c1c", "user": "operator"}
2025-11-06T14:28:57.328Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c99d5dd8-4a67-4917-a584-560ee2cd2c1c", "user": "monitor"}
2025-11-06T14:28:57.339Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c99d5dd8-4a67-4917-a584-560ee2cd2c1c", "user": "monitor"}
2025-11-06T14:28:57.364Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c99d5dd8-4a67-4917-a584-560ee2cd2c1c", "secret": "some-name-mysql-init", "user": "monitor"}
2025-11-06T14:28:57.388Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c99d5dd8-4a67-4917-a584-560ee2cd2c1c", "user": "monitor"}
2025-11-06T14:28:58.227Z ERROR Reconciler error {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "c99d5dd8-4a67-4917-a584-560ee2cd2c1c", "error": "reconcile users: manage sys users: is password propagated: Pod \"some-name-haproxy-0\" not found", "errorVerbose": "Pod \"some-name-haproxy-0\" not found\nis password propagated\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).handleMonitorUser\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:519\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updateUsers\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:160\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileUsers\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:101\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:349\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nmanage sys users\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileUsers\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:103\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:349\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nreconcile users\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:351\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-06T14:28:58.314Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "47e7fb7e-6f68-44b8-945d-eb133df29057", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-06T14:28:58.386Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "47e7fb7e-6f68-44b8-945d-eb133df29057", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"}
2025-11-06T14:28:58.433Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "47e7fb7e-6f68-44b8-945d-eb133df29057", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"}
2025-11-06T14:28:58.526Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "47e7fb7e-6f68-44b8-945d-eb133df29057", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
2025-11-06T14:28:58.707Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "47e7fb7e-6f68-44b8-945d-eb133df29057", "object": "some-name-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
2025-11-06T14:29:00.155Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "3c87f380-9796-4f2b-8a77-e87c926dba16", "error": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-1\" not found / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-1\" not found / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
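The "Pod \"some-name-haproxy-0\" not found" error followed immediately by "Creating object ... some-name-haproxy" captures the moment the test switches the cluster's proxy from ProxySQL to HAProxy: the user reconciler still expects the old proxy pods while the new HAProxy StatefulSet and Services are only being created. A sketch of the kind of CR change that drives such a switch (the field names follow the PXC custom resource schema, but treat the exact patch as an assumption to verify against your operator version):

  kubectl -n users-17636 patch pxc some-name --type=merge \
    -p '{"spec":{"haproxy":{"enabled":true},"proxysql":{"enabled":false}}}'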
2025-11-06T14:29:00.155Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "3c87f380-9796-4f2b-8a77-e87c926dba16", "error": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-1\" not found / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-1\" not found / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-17636.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-06T14:29:02.493Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "47e7fb7e-6f68-44b8-945d-eb133df29057", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.232.75:3306: connect: connection refused"}
2025-11-06T14:29:03.213Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "ac118fa7-e676-40c3-bb60-d99c302bbe54", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"}
2025-11-06T14:29:55.116Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "43844db9-d5c0-4e85-89fc-b89a15655219", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.5.177.54:33062: connect: connection refused"}
2025-11-06T14:30:38.408Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "9aaf0a45-289c-4431-8a8a-ff3eb271fb57", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: invalid connection"}
2025-11-06T14:31:19.879Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "6ce29297-4174-4a3f-bb25-dccfa5deeeb8", "user": "monitor"}
2025-11-06T14:31:21.596Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "6ce29297-4174-4a3f-bb25-dccfa5deeeb8", "user": "monitor"}
2025-11-06T14:31:21.603Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "6ce29297-4174-4a3f-bb25-dccfa5deeeb8", "user": "xtrabackup"}
2025-11-06T14:31:21.614Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "6ce29297-4174-4a3f-bb25-dccfa5deeeb8", "user": "xtrabackup"}
2025-11-06T14:31:21.642Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "6ce29297-4174-4a3f-bb25-dccfa5deeeb8", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2025-11-06T14:31:21.665Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "6ce29297-4174-4a3f-bb25-dccfa5deeeb8", "user": "xtrabackup"}
2025-11-06T14:31:21.678Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "6ce29297-4174-4a3f-bb25-dccfa5deeeb8", "user": "xtrabackup"}
2025-11-06T14:31:21.684Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "6ce29297-4174-4a3f-bb25-dccfa5deeeb8", "user": "replication"}
2025-11-06T14:31:21.696Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "6ce29297-4174-4a3f-bb25-dccfa5deeeb8", "user": "replication"}
2025-11-06T14:31:21.717Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "6ce29297-4174-4a3f-bb25-dccfa5deeeb8", "secret": "some-name-mysql-init", "user": "replication"}
2025-11-06T14:31:21.736Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "6ce29297-4174-4a3f-bb25-dccfa5deeeb8", "user": "replication"}
2025-11-06T14:31:21.748Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "6ce29297-4174-4a3f-bb25-dccfa5deeeb8", "user": "replication"}
2025-11-06T14:31:21.748Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "6ce29297-4174-4a3f-bb25-dccfa5deeeb8", "last-applied-secret": "58bc9af220b124fafe72ea91485722e92f278159e42dbc408e241ea631424562"}
2025-11-06T14:31:21.751Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "6ce29297-4174-4a3f-bb25-dccfa5deeeb8", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-06T14:32:17.317Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "7b38ad5f-df5a-4f3f-8497-bb89b13e488e", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: invalid connection"}
2025-11-06T14:32:53.822Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "bd171978-a446-4432-9cb7-5be91cbfdd6f", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-17636 on 34.118.224.10:53: no such host"}
2025-11-06T14:32:54.161Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "d6e83711-4f31-4fd4-90e7-6e8013bb7c77", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-17636 on 34.118.224.10:53: no such host"}
2025-11-06T14:33:04.324Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "f31b6e61-79d0-43fb-8405-d0758703d21b", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp 10.5.176.59:33062: connect: connection refused"}
2025-11-06T14:33:47.016Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "7b0bad3b-e629-473b-bc02-794406817356", "user": "monitor"}
2025-11-06T14:33:47.031Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "7b0bad3b-e629-473b-bc02-794406817356", "user": "monitor"}
2025-11-06T14:33:47.052Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "7b0bad3b-e629-473b-bc02-794406817356", "secret": "some-name-mysql-init", "user": "monitor"}
2025-11-06T14:33:47.078Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "7b0bad3b-e629-473b-bc02-794406817356", "user": "monitor"}
2025-11-06T14:33:51.948Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "26f5fd69-9ffb-4050-9cc8-46c1c4c8832e", "user": "monitor"}
2025-11-06T14:33:57.559Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "f3212c3a-8e04-4847-9d09-23377ff643fa", "user": "monitor"}
2025-11-06T14:34:03.218Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "2878ffcd-3aef-4012-ac3b-995c785d1bfe", "user": "monitor"}
2025-11-06T14:34:08.815Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "81ed5e0f-40e6-4b74-8558-388989e75677", "user": "monitor"}
2025-11-06T14:34:14.489Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-17636", "name": "some-name", "reconcileID": "e941b64c-4f14-4d5e-b4a3-92c4182c2fc1", "user": "monitor"}
github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:474
/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:869
[mysql] 2025/11/06 14:17:29 packets.go:58 read tcp 10.5.178.50:52182->10.5.177.45:33062: read: connection reset by peer
[mysql] 2025/11/06 14:18:17 packets.go:58 read tcp 10.5.178.50:46822->10.5.176.47:33062: read: connection reset by peer
[mysql] 2025/11/06 14:22:45 packets.go:58 read tcp 10.5.178.50:39278->10.5.177.47:33062: read: connection reset by peer
[mysql] 2025/11/06 14:23:33 packets.go:58 read tcp 10.5.178.50:43480->10.5.176.50:33062: read: connection reset by peer
[mysql] 2025/11/06 14:28:08 packets.go:58 read tcp 10.5.178.50:58644->10.5.176.54:33062: read: connection reset by peer
[mysql] 2025/11/06 14:30:38 packets.go:58 read tcp 10.5.178.50:44872->10.5.176.57:33062: read: connection reset by peer
[mysql] 2025/11/06 14:30:57 packets.go:58 unexpected EOF
[mysql] 2025/11/06 14:32:17 packets.go:58 read tcp 10.5.178.50:51730->10.5.177.56:33062: read: connection reset by peer
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1
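[editor's note] The condensed diff that follows is dominated by two annotations the operator stamps on its StatefulSets: "last-applied-secret" (a hex digest of the current system-user secrets) and "percona.com/last-config-hash" (base64 of the rendered pod-template JSON; the "eyJyZXBsaWNhcyI6..." prefixes visible in the original dump decode to {"replicas":...}). A sketch for inspecting the latter on a live cluster; the jq/base64 pipeline is an illustration, not part of the test harness:
kubectl -n users-17636 get sts some-name-pxc -o json \
  | jq -r '.metadata.annotations["percona.com/last-config-hash"]' \
  | base64 -d | jq .   # the log above truncates these values; the live annotation decodes cleanly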
"58bc9af220b124fafe72ea91485722e92f278159e42dbc408e241ea631424562", +  "8bc9af220b124fafe72ea91485722e92f278159e42dbc408e241ea631424562", -  "9906a036bb2051c30696b6b402277dc91da0df6526622a31b677488f6cdeca69", +  "9906a036bb2051c30696b6b402277dc91da0df6526622a31b677488f6cdeca69", -  Annotations: map[string]string{ +  Annotations: map[string]string{ +  APIVersion: "", -  APIVersion: "apps/v1", -  APIVersion: "apps/v1", -  APIVersion: "v1", -  Args: []string{"logrotate"}, +  AvailableReplicas: 0, -  AvailableReplicas: 2, -  AvailableReplicas: 3, -  CollisionCount: &0, +  CollisionCount: nil, +  CreationTimestamp: v1.Time{}, -  CreationTimestamp: v1.Time{Time: s"2025-11-06 14:09:38 +0000 UTC"}, +  CurrentReplicas: 0, -  CurrentReplicas: 2, -  CurrentReplicas: 3, +  CurrentRevision: "", -  CurrentRevision: "some-name-proxysql-5c7478b868", -  CurrentRevision: "some-name-proxysql-648cc585b4", -  CurrentRevision: "some-name-proxysql-769d646477", -  CurrentRevision: "some-name-proxysql-7dcb9f548c", -  CurrentRevision: "some-name-proxysql-9b784db6", -  CurrentRevision: "some-name-proxysql-f7cdc9789", -  CurrentRevision: "some-name-pxc-5cdb7bb5f7", -  CurrentRevision: "some-name-pxc-686856678f", -  CurrentRevision: "some-name-pxc-7ddc55b7d4", -  CurrentRevision: "some-name-pxc-c994dc9", -  CurrentRevision: "some-name-pxc-d67bfd884", -  DefaultMode: &420, -  DefaultMode: &420, +  DefaultMode: nil, +  DefaultMode: nil, +  DeprecatedServiceAccount: "", -  DeprecatedServiceAccount: "default", -  "dfbacc85608362120c47cf3fea3c325437200a3f2f47a3fcc46addb717371144", +  "dfbacc85608362120c47cf3fea3c325437200a3f2f47a3fcc46addb717371144", +  DNSPolicy: "", -  DNSPolicy: "ClusterFirst", -  EnvFrom: []v1.EnvFromSource{{SecretRef: s"&SecretEnvSource{LocalObjectRefe"...}}, -  Env: []v1.EnvVar{ -  "f1f3421f368c01fae6484625d500ced346c1397364f6880530474c03a11ba260", -  "fbacc85608362120c47cf3fea3c325437200a3f2f47a3fcc46addb717371144", -  FieldsType: "FieldsV1", -  FieldsType: "FieldsV1", -  FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`..., -  FieldsV1: s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`..., +  Generation: 0, -  Generation: 1, -  Generation: 2, -  Generation: 3, -  Generation: 4, -  Generation: 5, -  Generation: 6, -  Generation: 7, -  Generation: 8, -  Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", -  Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", -  ImagePullPolicy: "Always", +  "last-applied-secret": "0a4f82fb5bd65d8d78c50d7cba6645f81bdb0e5e22e3448a4bfde18df16501bf", +  "last-applied-secret": "f1f3421f368c01fae6484625d500ced346c1397364f6880530474c03a11ba260", +  ManagedFields: nil, -  ManagedFields: []v1.ManagedFieldsEntry{ -  Manager: "kube-controller-manager", -  Manager: "percona-xtradb-cluster-operator", -  {Name: "IS_LOGCOLLECTOR", Value: "yes"}, -  {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"}, -  Name: "logrotate", -  Name: "logs", -  {Name: "MONITOR_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, -  {Name: "POD_NAMESPASE", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, -  {Name: "POD_NAME", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, -  {Name: "SERVICE_TYPE", Value: "mysql"}, +  ObservedGeneration: 0, -  ObservedGeneration: 1, -  ObservedGeneration: 2, -  ObservedGeneration: 3, -  ObservedGeneration: 4, -  ObservedGeneration: 5, -  ObservedGeneration: 6, -  ObservedGeneration: 7, -  ObservedGeneration: 8, -  Operation: "Update", -  Operation: "Update", -  
"percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNThiYzlhZjIyMGIxMjRmYWZlNzJlYTkxNDg1NzIyZTkyZjI3ODE1OWU0MmRiYzQwOGUyNDFlYTYzMTQyNDU2MiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNThiYzlhZjIyMGIxMjRmYWZlNzJlYTkxNDg1NzIyZTkyZjI3ODE1OWU0MmRiYzQwOGUyNDFlYTYzMTQyNDU2MiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNTMxMGEyMTNjMmFmZTJkZGE0MjhiNWU1YWI5OTQ2NzA3MmZlNWM4NDljMDgxM2JjNWIwZGVhYzBlOTEwYzFlMiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNTMxMGEyMTNjMmFmZTJkZGE0MjhiNWU1YWI5OTQ2NzA3MmZlNWM4NDljMDgxM2JjNWIwZGVhYzBlOTEwYzFlMiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNTRkMGYwY2E3Njg0ZmQ3YmFhYTNiM2RmZTMyZmU3MzZiZGZiZjM0YWQzZTU1NmEwNWRiZTJlZGQyYTRmNjQ2YyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOTkwNmEwMzZiYjIwNTFjMzA2OTZiNmI0MDIyNzdkYzkxZGEwZGY2NTI2NjIyYTMxYjY3NzQ4OGY2Y2RlY2E2OSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOTkwNmEwMzZiYjIwNTFjMzA2OTZiNmI0MDIyNzdkYzkxZGEwZGY2NTI2NjIyYTMxYjY3NzQ4OGY2Y2RlY2E2OSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZGZiYWNjODU2MDgzNjIxMjBjNDdjZjNmZWEzYzMyNTQzNzIwMGEzZjJmNDdhM2ZjYzQ2YWRkYjcxNzM3MTE0NCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZGZiYWNjODU2MDgzNjIxMjBjNDdjZjNmZWEzYzMyNTQzNzIwMGEzZjJmNDdhM2ZjYzQ2YWRkYjcxNzM3MTE0NCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZjFmMzQyMWYzNjhjMDFmYWU2NDg0NjI1ZDUwMGNlZDM0NmMxMzk3MzY0ZjY4ODA1MzA0NzRjMDNhMTFiYTI2MCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMGE0ZjgyZmI1YmQ2NWQ4ZDc4YzUwZDdjYmE2NjQ1ZjgxYmRiMGU1ZTIyZTM0NDhhNGJmZGUxOGRmMTY1MDFiZiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMGE0ZjgyZmI1YmQ2NWQ4ZDc4YzUwZDdjYmE2NjQ1ZjgxYmRiMGU1ZTIyZTM0NDhhNGJmZGUxOGRmMTY1MDFiZiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNThiYzlhZjIyMGIxMjRmYWZlNzJlYTkxNDg1NzIyZTkyZjI3ODE1OWU0MmRiYzQwOGUyNDFlYTYzMTQyNDU2MiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNTRkMGYwY2E3Njg0ZmQ3YmFhYTNiM2RmZTMyZmU3MzZiZGZiZjM0YWQzZTU1NmEwNWRiZTJlZGQyYTRmNjQ2YyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNTRkMGYwY2E3Njg0ZmQ3YmFhYTNiM2RmZTMyZmU3MzZiZGZiZjM0YWQzZTU1NmEwNWRiZTJlZGQyYTRmNjQ2YyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNTRkMGYwY2E3Njg0ZmQ3YmFhYTNiM2RmZTMyZmU3MzZiZGZiZjM0YWQzZTU1NmEwNWRiZTJlZGQyYTRmNjQ2YyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMjA3LTg5MjA5Y2UxIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUi
OiJsb2dzIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1sb2djb2xsZWN0b3IiLCJlbnZGcm9tIjpbeyJzZWNyZXRSZWYiOnsibmFtZSI6InNvbWUtbmFtZS1sb2ctY29sbGVjdG9yIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IkxPR19EQVRBX0RJUiIsInZhbHVlIjoiL3Zhci9saWIvbXlzcWwifSx7Im5hbWUiOiJQT0RfTkFNRVNQQVNFIiwidmFsdWVGcm9tIjp7ImZpZWxkUmVmIjp7ImZpZWxkUGF0aCI6Im1ldGFkYXRhLm5hbWVzcGFjZSJ9fX0seyJuYW1lIjoiUE9EX05BTUUiLCJ2YWx1ZUZyb20iOnsiZmllbGRSZWYiOnsiZmllbGRQYXRoIjoibWV0YWRhdGEubmFtZSJ9fX1dLCJyZXNvdXJjZXMiOnt9LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJkYXRhZGlyIiwibW91bnRQYXRoIjoiL3Zhci9saWIvbXlzcWwifV0sImltYWdlUHVsbFBvbGljeSI6IkFsd2F5cyJ9LHsibmFtZSI6ImxvZ3JvdGF0ZSIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOm1haW4tbG9nY29sbGVjdG9yIiwiYXJncyI6WyJsb2dyb3RhdGUiXSwiZW52IjpbeyJuYW1lIjoiU0VSVklDRV9UWVBFIiwidmFsdWUiOiJteXNxbCJ9LHsibmFtZSI6Ik1PTklUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJtb25pdG9yIn19fV0sInJlc291cmNlcyI6e30sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImRhdGFkaXIiLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHhjIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1weGM4LjAiLCJjb21tYW5kIjpbIi92YXIvbGliL215c3FsL3B4Yy1lbnRyeXBvaW50LnNoIl0sImFyZ3MiOlsibXlzcWxkIl0sInBvcnRzIjpbeyJuYW1lIjoibXlzcWwiLCJjb250YWluZXJQb3J0IjozMzA2fSx7Im5hbWUiOiJzc3QiLCJjb250YWluZXJQb3J0Ijo0NDQ0fSx7Im5hbWUiOiJ3cml0ZS1zZXQiLCJjb250YWluZXJQb3J0Ijo0NTY3fSx7Im5hbWUiOiJpc3QiLCJjb250YWluZXJQb3J0Ijo0NTY4fSx7Im5hbWUiOiJteXNxbC1hZG1pbiIsImNvbnRhaW5lclBvcnQiOjMzMDYyfSx7Im5h"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNTRkMGYwY2E3Njg0ZmQ3YmFhYTNiM2RmZTMyZmU3MzZiZGZiZjM0YWQzZTU1NmEwNWRiZTJlZGQyYTRmNjQ2YyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlc
m5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMjA3LTg5MjA5Y2UxIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXB4YzguMCIsImNvbW1hbmQiOlsiL3Zhci9saWIvbXlzcWwvcHhjLWVudHJ5cG9pbnQuc2giXSwiYXJncyI6WyJteXNxbGQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InNzdCIsImNvbnRhaW5lclBvcnQiOjQ0NDR9LHsibmFtZSI6IndyaXRlLXNldCIsImNvbnRhaW5lclBvcnQiOjQ1Njd9LHsibmFtZSI6ImlzdCIsImNvbnRhaW5lclBvcnQiOjQ1Njh9LHsibmFtZSI6Im15c3FsLWFkbWluIiwiY29udGFpbmVyUG9ydCI6MzMwNjJ9LHsibmFtZSI6Im15c3FseCIsImNvbnRhaW5lclBvcnQiOjMzMDYwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXB4YyIsIm9wdGlvbmFsIjp0cnVlfX1dLCJlbnYiOlt7Im5hbWUiOiJQWENfU0VSVklDRSIsInZhbHVlIjoic29tZS1uYW1lLXB4Yy11bnJlYWR5In0seyJuYW1lIjoiTU9OSVRPUl9IT1NUIiwidmFsdWUiOiIlIn0seyJuYW1lIjoiTVlTUUxfUk9PVF9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6InJvb3QifX19LHsibmFtZSI6IlhUUkFCQUNLVVBfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJ4dHJhYmFja3VwIn19fSx7Im5hbWUiOiJNT05JVE9SX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoibW9uaXRvciJ9fX0seyJuYW1lIjoiQ0xVU1RFUl9IQVNIIiwidmFsdWUiOiIyNDQwNTAxIn0seyJuYW1lIjoiT1BFUkFUT1JfQURNSU5fUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiTElWRU5FU1NfQ0hFQ0tfVElNRU9VVCIsInZhbHVlIjoiNSJ9LHsibmFtZSI6IlJFQURJTkVTU19DSEVDS19USU1FT1VUIiwidmFsdWUiOiIxNSJ9LHsibmFtZSI6IkRFRkFVTFRfQVVUSEVOVElDQVRJT05fUExVR0lOIiwidmFsdWUiOiJjYWNoaW5nX3NoYTJfcGFzc3dvcmQifSx7Im5hbWUiOiJNWVNR"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZGZiYWNjODU2MDgzNjIxMjBjNDdjZjNmZWEzYzMyNTQzNzIwMGEzZjJmNDdhM2ZjYzQ2YWRkYjcxNzM3MTE0NCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZGZiYWNjODU2MDgzNjIxMjBjNDdjZjNmZWEzYzMyNTQzNzIwMGEzZjJmNDdhM2ZjYzQ2YWRkYjcxNzM3MTE0NCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNv"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZjFmMzQyMWYzNjhjMDFmYWU2NDg0NjI1ZDUwMGNlZDM0NmMxMzk3MzY0ZjY4ODA1MzA0NzRjMDNhMTFiYTI2MCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMw"..., +  PeriodSeconds: 0, -  PeriodSeconds: 10, +  PersistentVolumeClaimRetentionPolicy: nil, -  PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", +  Phase: "", -  Phase: "Pending", +  PodManagementPolicy: "", -  PodManagementPolicy: "OrderedReady", +  Protocol: "", -  Protocol: "TCP", +  ReadyReplicas: 0, -  ReadyReplicas: 2, -  ReadyReplicas: 3, +  Replicas: 0, -  Replicas: 2, -  Replicas: &2, +  Replicas: &2, -  Replicas: 3, -  Replicas: &3, +  Replicas: &3, +  ResourceVersion: "", -  ResourceVersion: "1762438211810575014", -  ResourceVersion: "1762438402666095007", -  ResourceVersion: "1762438536310879014", -  ResourceVersion: "1762438577039951014", -  ResourceVersion: "1762438589660511014", -  ResourceVersion: "1762438732796335007", -  ResourceVersion: "1762438791034479014", -  ResourceVersion: "1762438875859999014", -  ResourceVersion: "1762438936987023014", -  ResourceVersion: "1762439056747407007", -  ResourceVersion: "1762439099222671014", -  ResourceVersion: "1762439332052831007", -  ResourceVersion: "1762439474604223007", +  RestartPolicy: "", -  RestartPolicy: "Always", -  RevisionHistoryLimit: &10, +  RevisionHistoryLimit: nil, +  SchedulerName: "", -  SchedulerName: "default-scheduler", -  Subresource: "status", +  TerminationMessagePath: "", -  TerminationMessagePath: "/dev/termination-log", +  TerminationMessagePolicy: "", -  
TerminationMessagePolicy: "File", -  Time: s"2025-11-06 14:09:38 +0000 UTC", -  Time: s"2025-11-06 14:10:11 +0000 UTC", -  Time: s"2025-11-06 14:13:22 +0000 UTC", -  Time: s"2025-11-06 14:15:17 +0000 UTC", -  Time: s"2025-11-06 14:15:36 +0000 UTC", -  Time: s"2025-11-06 14:15:42 +0000 UTC", -  Time: s"2025-11-06 14:16:17 +0000 UTC", -  Time: s"2025-11-06 14:16:28 +0000 UTC", -  Time: s"2025-11-06 14:16:29 +0000 UTC", -  Time: s"2025-11-06 14:16:30 +0000 UTC", -  Time: s"2025-11-06 14:18:52 +0000 UTC", -  Time: s"2025-11-06 14:19:03 +0000 UTC", -  Time: s"2025-11-06 14:19:51 +0000 UTC", -  Time: s"2025-11-06 14:20:41 +0000 UTC", -  Time: s"2025-11-06 14:21:15 +0000 UTC", -  Time: s"2025-11-06 14:21:41 +0000 UTC", -  Time: s"2025-11-06 14:22:16 +0000 UTC", -  Time: s"2025-11-06 14:24:16 +0000 UTC", -  Time: s"2025-11-06 14:24:26 +0000 UTC", -  Time: s"2025-11-06 14:24:59 +0000 UTC", -  Time: s"2025-11-06 14:26:20 +0000 UTC", -  Time: s"2025-11-06 14:28:52 +0000 UTC", -  Time: s"2025-11-06 14:28:58 +0000 UTC", -  Time: s"2025-11-06 14:31:14 +0000 UTC", -  TopologySpreadConstraints: nil, +  TopologySpreadConstraints: []v1.TopologySpreadConstraint{}, +  UID: "", -  UID: "21cb5511-80ae-4472-89b9-2fff3f627971", -  UID: "57eed8c0-2f45-42c8-b2eb-0b5c58ec7d1e", +  UpdatedReplicas: 0, -  UpdatedReplicas: 2, -  UpdatedReplicas: 3, +  UpdateRevision: "", -  UpdateRevision: "some-name-proxysql-5c7478b868", -  UpdateRevision: "some-name-proxysql-648cc585b4", -  UpdateRevision: "some-name-proxysql-769d646477", -  UpdateRevision: "some-name-proxysql-7dcb9f548c", -  UpdateRevision: "some-name-proxysql-9b784db6", -  UpdateRevision: "some-name-proxysql-f7cdc9789", -  UpdateRevision: "some-name-pxc-5cdb7bb5f7", -  UpdateRevision: "some-name-pxc-686856678f", -  UpdateRevision: "some-name-pxc-7ddc55b7d4", -  UpdateRevision: "some-name-pxc-c994dc9", -  UpdateRevision: "some-name-pxc-d67bfd884", +  Value: "caching_sha2_password", -  Value: "mysql_native_password", -  VolumeMode: &"Filesystem", +  VolumeMode: nil, -  VolumeMounts: []v1.VolumeMount{{Name: "datadir", MountPath: "/var/lib/mysql"}},   }    },    },    {    },    },    {    },    }, ""),    },    {    },    },    },    ... // 16 identical fields    ... // 16 identical fields    ... // 22 identical fields    ... // 2 identical fields    ... // 2 identical fields    ... // 2 identical fields    ... // 3 identical elements    ... // 3 identical fields    ... // 3 identical fields    ... // 3 identical fields    ... // 4 identical fields    "5",    ... // 5 identical fields    ... // 5 identical fields    ... // 5 identical fields    ... // 6 identical fields    ... // 6 identical fields    ... // 7 identical fields    ... // 8 identical fields    ... // 9 identical fields    ... 
// 9 identical fields    AccessModes: nil,    ActiveDeadlineSeconds: nil,    Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},    Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},    Annotations: map[string]string{    Args: {"mysqld"},    Args: {"proxysql", "-f", "-c", "/etc/proxysql/proxysql.cnf", ...},    AutomountServiceAccountToken: nil,    AWSElasticBlockStore: nil,    AzureFile: nil,    Capacity: nil,    Conditions: nil,    ConfigMapKeyRef: nil,    ConfigMap: &v1.ConfigMapVolumeSource{    ContainerPort: 3306,    ContainerPort: 33060,    ContainerPort: 33062,    ContainerPort: 4444,    ContainerPort: 4567,    ContainerPort: 4568,    ContainerPort: 6032,    ContainerPort: 6070,    Containers: []v1.Container{    "d",    DataSource: nil,    DataSourceRef: nil,    DeletionGracePeriodSeconds: nil,    DeletionGracePeriodSeconds: nil,    DeletionTimestamp: nil,    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-proxysql"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-pxc"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-log-collector"}, Optional: &true}}},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "OPERATOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "proxyadmin"}}}, ...},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, ...},    Env: []v1.EnvVar{    EphemeralContainers: nil,    FailureThreshold: 3,    FC: nil,    FieldPath: "metadata.name",    FieldPath: "metadata.namespace",    FieldRef: &v1.ObjectFieldSelector{    Finalizers: nil,    Finalizers: nil,    GitRepo: nil,    HostAliases: nil,    HostIP: "",    HostPort: 0,    ImagePullPolicy: "Always",    InitContainers: []v1.Container{    InitialDelaySeconds: 300,    ISCSI: nil,    Items: nil,    Items: nil,    "kubectl.kubernetes.io/default-container": "proxysql",    "kubectl.kubernetes.io/default-container": "pxc",    Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},    Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},    
Labels: nil,    "last-applied-secret": strings.Join({    Lifecycle: nil,    LivenessProbe: &v1.Probe{    LocalObjectReference: {Name: "auto-some-name-pxc"},    LocalObjectReference: {Name: "some-name-pxc"},    ManagedFields: nil,    MinReadySeconds: 0,    Name: "auto-config",    {Name: "bin", VolumeSource: {EmptyDir: &{}}},    {Name: "CLUSTER_HASH", Value: "2440501"},    Name: "config",    Name: "DEFAULT_AUTHENTICATION_PLUGIN",    Name: "ist",    {Name: "LIVENESS_CHECK_TIMEOUT", Value: "5"},    {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"},    {Name: "MONITOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "monitor"}}},    Name: "mysql",    Name: "mysql-admin",    Name: "mysql-init-file",    {Name: "MYSQL_NOTIFY_SOCKET", Value: "/var/lib/mysql/notify.sock"},    {Name: "MYSQL_STATE_FILE", Value: "/var/lib/mysql/mysql.state"},    Name: "mysql-users-secret-file",    Name: "mysqlx",    {Name: "OPERATOR_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}},    Name: "POD_NAME",    Name: "POD_NAMESPASE",    Name: "proxyadm",    {Name: "READINESS_CHECK_TIMEOUT", Value: "15"},    Namespace: "users-17636",    Name: "ssl",    Name: "ssl-internal",    Name: "sst",    Name: "stats",    {Name: "tmp", VolumeSource: {EmptyDir: &{}}},    Name: "vault-keyring-secret",    Name: "write-set",    {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}},    NFS: nil,    NodeName: "",    NodeSelector: nil,    ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "last-applied-secret": "f1f3421f368c01fae6484625d500ced346c1397364f6880530474c03a11ba260", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}},    ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}},    ObjectMeta: {Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "pxc", "last-applied-secret": "54d0f0ca7684fd7baaa3b3dfe32fe736bdfbf34ad3e556a05dbe2edd2a4f646c", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}},    ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: {Name: "proxydata", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", 
"app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: v1.ObjectMeta{    ObjectMeta: v1.ObjectMeta{    Optional: &false,    Optional: &true,    Optional: &true,    Ordinals: nil,    OS: nil,    Overhead: nil,    OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name", UID: "dc265c79-fcc1-43f6-961f-21344dc4be74", ...}},    OwnerReferences: nil,    "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e",    "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e",    "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a",    "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a",    "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc",    "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc",    Ports: nil,    Ports: []v1.ContainerPort{    PreemptionPolicy: nil,    ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}},    Quobyte: nil,    ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...},    Replicas: &2,    Replicas: &3,    ResizePolicy: nil,    ResourceFieldRef: nil,    Resources: {},    Resources: {Limits: {s"cpu": {i: {...}, s: "1", Format: "DecimalSI"}, s"memory": {i: {...}, s: "2G", Format: "DecimalSI"}}, Requests: {s"cpu": {i: {...}, s: "100m", Format: "DecimalSI"}, s"memory": {i: {...}, Format: "DecimalSI"}}},    SecretName: "internal-some-name",    SecretName: "some-name-mysql-init",    SecretName: "some-name-ssl",    SecretName: "some-name-ssl-internal",    SecretName: "some-name-vault",    Secret: &v1.SecretVolumeSource{    SecurityContext: nil,    Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    SelfLink: "",    ServiceAccountName: "default",    ServiceName: "some-name-proxysql-unready",    ServiceName: "some-name-pxc",    SetHostnameAsFQDN: nil,    Spec: v1.PersistentVolumeClaimSpec{    Spec: v1.PodSpec{    Spec: v1.StatefulSetSpec{    StartupProbe: nil,    Status: v1.PersistentVolumeClaimStatus{    Status: v1.StatefulSetStatus{    StorageClassName: nil,    Subdomain: "",    SuccessThreshold: 1,    Template: v1.PodTemplateSpec{    TerminationGracePeriodSeconds: &30,    TerminationGracePeriodSeconds: &600,    TerminationGracePeriodSeconds: nil,    TimeoutSeconds: 5,    Tolerations: nil,    TypeMeta: {},    TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"},    UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}},   &v1.StatefulSet{    Value: "",    ValueFrom: nil,    ValueFrom: &v1.EnvVarSource{    VolumeAttributesClassName: nil,    VolumeClaimTemplates: []v1.PersistentVolumeClaim{    VolumeDevices: nil,    VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...},    VolumeName: "",    VolumeSource: v1.VolumeSource{    Volumes: []v1.Volume{    VsphereVolume: nil,    
WorkingDir: "", + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl get pxc --all-namespaces -o wide + kubectl patch pxc -n users-17636 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.Gt3IrA9HA0 ++ mktemp + local LAST_ERR=/tmp/tmp.owzbiWM5Bn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Gt3IrA9HA0 perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-17636 namespace + cat /tmp/tmp.owzbiWM5Bn + rm /tmp/tmp.Gt3IrA9HA0 /tmp/tmp.owzbiWM5Bn + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.EZhET6ObKN ++ mktemp + local LAST_ERR=/tmp/tmp.JVjynnjepn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.EZhET6ObKN No resources found + cat /tmp/tmp.JVjynnjepn + rm /tmp/tmp.EZhET6ObKN /tmp/tmp.JVjynnjepn + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.rs07btn4BN ++ mktemp + local LAST_ERR=/tmp/tmp.x1qLiS9KB0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rs07btn4BN No resources found + cat /tmp/tmp.x1qLiS9KB0 + rm /tmp/tmp.rs07btn4BN /tmp/tmp.x1qLiS9KB0 + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.dCO2fAZWA0 ++ mktemp + local LAST_ERR=/tmp/tmp.g4ZAzFRtro + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dCO2fAZWA0 validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.g4ZAzFRtro + rm /tmp/tmp.dCO2fAZWA0 /tmp/tmp.g4ZAzFRtro + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-17636 + rm -rf /tmp/tmp.WP88OxA47Z + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.mbkCkwWPuN + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- ++ mktemp + local LAST_OUT=/tmp/tmp.wBYmB6PzWg ++ mktemp + local LAST_ERR=/tmp/tmp.tt2qlW4QR9 + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.z4RmJWEGam + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-17636 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator