Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/logs/users-5-7.log Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 + create_infra users-4487 + local ns=users-4487 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE + kubectl patch pxc -n users-8875 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.ibPT80z9Oi ++ mktemp + local LAST_ERR=/tmp/tmp.aldKd4tXQI + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ibPT80z9Oi perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-8875 namespace + cat /tmp/tmp.aldKd4tXQI + rm /tmp/tmp.ibPT80z9Oi /tmp/tmp.aldKd4tXQI + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.m3mhNpU2YW ++ mktemp + local LAST_ERR=/tmp/tmp.FHoYGjYxJH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.m3mhNpU2YW No resources found + cat /tmp/tmp.FHoYGjYxJH + rm /tmp/tmp.m3mhNpU2YW /tmp/tmp.FHoYGjYxJH + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.1uIehHKUoZ ++ mktemp + local LAST_ERR=/tmp/tmp.laPE7Tvdau + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1uIehHKUoZ No resources found + cat /tmp/tmp.laPE7Tvdau + rm /tmp/tmp.1uIehHKUoZ /tmp/tmp.laPE7Tvdau + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ awk '{print $1}' ++ grep chaos-mesh.org + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, 
but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + awk '{print$1}' ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.Ijl5Q1yfi3 ++ mktemp + xargs kubectl delete ns + local LAST_OUT=/tmp/tmp.M7lgsoU48s + local LAST_ERR=/tmp/tmp.JX67wJJ5mW + local exit_status=0 ++ mktemp + local LAST_ERR=/tmp/tmp.XpUymDM7nT + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.M7lgsoU48s + cat /tmp/tmp.XpUymDM7nT + rm /tmp/tmp.M7lgsoU48s /tmp/tmp.XpUymDM7nT + return 0 namespace "users-8875" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Ijl5Q1yfi3 namespace "pxc-operator" deleted + cat /tmp/tmp.JX67wJJ5mW + rm /tmp/tmp.Ijl5Q1yfi3 /tmp/tmp.JX67wJJ5mW + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.cynol3hEDp ++ mktemp + local LAST_ERR=/tmp/tmp.jJyIY9HZIs + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.cynol3hEDp namespace/pxc-operator created + cat /tmp/tmp.jJyIY9HZIs + rm /tmp/tmp.cynol3hEDp /tmp/tmp.jJyIY9HZIs + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.a77gheIlBO +++ mktemp ++ local LAST_ERR=/tmp/tmp.ipDKR0Fzo5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.a77gheIlBO ++ cat /tmp/tmp.ipDKR0Fzo5 ++ rm /tmp/tmp.a77gheIlBO /tmp/tmp.ipDKR0Fzo5 ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2207-89209ce1-16-cluster3 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.TBw6E4JYQE ++ mktemp + local LAST_ERR=/tmp/tmp.9WrfkkMPlO + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2207-89209ce1-16-cluster3 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TBw6E4JYQE Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2207-89209ce1-16-cluster3" modified. 
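
Every kubectl call in this trace goes through the harness's kubectl_bin wrapper, whose retry pattern (mktemp capture files, up to three attempts, break on exit 0, replay of captured output) is visible above. A minimal reconstruction of that pattern, with the variable names taken from the log and the exact function body assumed:

    # Sketch of the kubectl_bin retry wrapper seen throughout this trace.
    # LAST_OUT/LAST_ERR/exit_status match the log; internals are assumed.
    kubectl_bin() {
        local LAST_OUT=$(mktemp)
        local LAST_ERR=$(mktemp)
        local exit_status=0
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ $exit_status != 0 ]; then
                sleep 0              # retry after a (zero-length) pause
            else
                break                # success: stop retrying
            fi
        done
        cat "$LAST_OUT"              # replay captured stdout
        cat "$LAST_ERR" >&2          # replay captured stderr
        rm "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }
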
+ cat /tmp/tmp.9WrfkkMPlO + rm /tmp/tmp.TBw6E4JYQE /tmp/tmp.9WrfkkMPlO + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.NKwedf2DA7 ++ mktemp + local LAST_ERR=/tmp/tmp.yM0SXhVqfT + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.NKwedf2DA7 customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.yM0SXhVqfT + rm /tmp/tmp.NKwedf2DA7 /tmp/tmp.yM0SXhVqfT + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.1uHCZsrgng ++ mktemp + local LAST_ERR=/tmp/tmp.eR3IzZPt8d + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1uHCZsrgng clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.eR3IzZPt8d + rm /tmp/tmp.1uHCZsrgng /tmp/tmp.eR3IzZPt8d + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/deploy/cw-operator.yaml + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2207-89209ce1^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + kubectl_bin apply -f - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - ++ mktemp + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + local LAST_OUT=/tmp/tmp.VsLULAvoW7 ++ mktemp + local LAST_ERR=/tmp/tmp.M0OToXJvPn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.VsLULAvoW7 deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.M0OToXJvPn + rm /tmp/tmp.VsLULAvoW7 /tmp/tmp.M0OToXJvPn + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.dFKYMEF9Yy ++ mktemp + local LAST_ERR=/tmp/tmp.eYtKDFNCl1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dFKYMEF9Yy pod/percona-xtradb-cluster-operator-58dd9fd94c-xtqwz condition met + cat /tmp/tmp.eYtKDFNCl1 + rm /tmp/tmp.dFKYMEF9Yy /tmp/tmp.eYtKDFNCl1 + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.G4wtpo2G6j +++ mktemp ++ local LAST_ERR=/tmp/tmp.7ykgV9oYxh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.G4wtpo2G6j ++ cat /tmp/tmp.7ykgV9oYxh ++ rm /tmp/tmp.G4wtpo2G6j /tmp/tmp.7ykgV9oYxh ++ return 0 + wait_pod percona-xtradb-cluster-operator-58dd9fd94c-xtqwz 480 pxc-operator + local pod=percona-xtradb-cluster-operator-58dd9fd94c-xtqwz + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-58dd9fd94c-xtqwz ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-58dd9fd94c-xtqwz condition met waiting for pod/percona-xtradb-cluster-operator-58dd9fd94c-xtqwz to become Ready.Ok + sleep 3 + create_namespace users-4487 + local namespace=users-4487 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + '[' -n '' ']' 
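
The repeated "error: resource(s) were provided, but no name was specified" lines above are expected: destroy_chaos_mesh greps for chaos-mesh objects, finds none on a clean cluster, hands kubectl delete an empty name list, and the trailing ':' no-op swallows the failure. A sketch of that cleanup, reconstructed from the trace rather than copied from the test library:

    # Sketch of destroy_chaos_mesh as it appears in this trace.
    destroy_chaos_mesh() {
        local chaos_mesh_ns=$(helm list --all-namespaces --filter chaos-mesh \
            | tail -n1 | awk -F' ' '{print $2}' | sed s/NAMESPACE//)
        # assumed branch: uninstall the release only if one was found
        [ -n "$chaos_mesh_ns" ] && helm uninstall chaos-mesh -n "$chaos_mesh_ns"

        # On a clean cluster each grep matches nothing, so kubectl delete gets
        # no names and fails with "no name was specified"; ':' ignores that.
        timeout 30 kubectl delete MutatingWebhookConfiguration \
            $(kubectl get MutatingWebhookConfiguration | grep chaos-mesh | awk '{print $1}') || :
        timeout 30 kubectl delete ValidatingWebhookConfiguration \
            $(kubectl get ValidatingWebhookConfiguration | grep chaos-mesh | awk '{print $1}') || :
        timeout 30 kubectl delete crd \
            $(kubectl get crd | grep chaos-mesh.org | awk '{print $1}') || :
        timeout 30 kubectl delete clusterrolebinding \
            $(kubectl get clusterrolebinding | grep chaos-mesh | awk '{print $1}') || :
        timeout 30 kubectl delete clusterrole \
            $(kubectl get clusterrole | grep chaos-mesh | awk '{print $1}') || :
    }
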
+ desc 'cleaned up old namespaces users-4487' + set +o xtrace ++ mktemp ----------------------------------------------------------------------------------- cleaned up old namespaces users-4487 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-4487 ++ mktemp + xargs kubectl delete ns + local LAST_OUT=/tmp/tmp.0OtSbK9I12 + egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + local LAST_OUT=/tmp/tmp.jVNJJfXbA3 ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.Bl5Kt2hAww + local exit_status=0 + local LAST_ERR=/tmp/tmp.hWYnwNQBsH + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-4487 + for i in '$(seq 0 2)' + set +e + kubectl get ns + awk '{print$1}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0OtSbK9I12 + cat /tmp/tmp.hWYnwNQBsH + rm /tmp/tmp.0OtSbK9I12 /tmp/tmp.hWYnwNQBsH + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-4487 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-4487 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.jVNJJfXbA3 + cat /tmp/tmp.Bl5Kt2hAww Error from server (NotFound): namespaces "users-4487" not found + rm /tmp/tmp.jVNJJfXbA3 /tmp/tmp.Bl5Kt2hAww + return 1 + : + wait_for_delete namespace/users-4487 + local res=namespace/users-4487 + echo -n 'waiting for namespace/users-4487 to be deleted' waiting for namespace/users-4487 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "users-4487" not found + desc 'create namespace users-4487' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-4487 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-4487 ++ mktemp + local LAST_OUT=/tmp/tmp.6bgOY1k6YI ++ mktemp + local LAST_ERR=/tmp/tmp.ThPkvOzwW8 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-4487 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6bgOY1k6YI namespace/users-4487 created + cat /tmp/tmp.ThPkvOzwW8 + rm /tmp/tmp.6bgOY1k6YI /tmp/tmp.ThPkvOzwW8 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.uNebV2fGUj +++ mktemp ++ local LAST_ERR=/tmp/tmp.64aLawHSle ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uNebV2fGUj ++ cat /tmp/tmp.64aLawHSle ++ rm /tmp/tmp.uNebV2fGUj /tmp/tmp.64aLawHSle ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2207-89209ce1-16-cluster3 --namespace=users-4487 ++ mktemp + local LAST_OUT=/tmp/tmp.gMNjPnPUTd ++ mktemp + local LAST_ERR=/tmp/tmp.21BNkZHbQc + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2207-89209ce1-16-cluster3 --namespace=users-4487 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.gMNjPnPUTd Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2207-89209ce1-16-cluster3" modified. 
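
The create_namespace sequence traced above (chaos-mesh teardown, pruning of leftover namespaces, delete-and-wait on the target, recreate, re-point the kubectl context) reads more clearly condensed. A sketch assuming the helpers behave as the trace shows:

    # Condensed reconstruction of create_namespace (helper internals assumed).
    create_namespace() {
        local namespace=$1
        destroy_chaos_mesh
        # prune leftover test namespaces, keeping system and operator ones;
        # with nothing left over, xargs runs a bare 'kubectl delete ns' and
        # fails harmlessly, exactly as seen in the trace above
        kubectl_bin get ns \
            | egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' \
            | awk '{print $1}' | xargs kubectl delete ns
        kubectl_bin delete namespace "$namespace" || :   # NotFound is tolerated
        wait_for_delete "namespace/$namespace"
        kubectl_bin create namespace "$namespace"
        kubectl config set-context "$(kubectl config current-context)" --namespace="$namespace"
    }
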
+ cat /tmp/tmp.21BNkZHbQc + rm /tmp/tmp.gMNjPnPUTd /tmp/tmp.21BNkZHbQc + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.JViCVCve0H ++ mktemp + local LAST_ERR=/tmp/tmp.lReGAZMrmi + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.JViCVCve0H secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.lReGAZMrmi + rm /tmp/tmp.JViCVCve0H /tmp/tmp.lReGAZMrmi + return 0 + desc 'create PXC cluster with 1-password secret' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster with 1-password secret ----------------------------------------------------------------------------------- + newpass=test-password ++ base64 ++ echo -n test-password + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/some-name.yml '' '' /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/secrets_one_pass.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/secrets_one_pass.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/secrets_one_pass.yml ++ mktemp + local LAST_OUT=/tmp/tmp.lv3I5mwp5j ++ mktemp + local LAST_ERR=/tmp/tmp.UVAkrOhTrb + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/secrets_one_pass.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lv3I5mwp5j secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.UVAkrOhTrb + rm /tmp/tmp.lv3I5mwp5j /tmp/tmp.UVAkrOhTrb + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed 
-e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-4487~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2207-89209ce1#' ++ mktemp + local LAST_OUT=/tmp/tmp.V5YphHlapj + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp + local LAST_ERR=/tmp/tmp.9379VSm0qC + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.V5YphHlapj deployment.apps/pxc-client created + cat /tmp/tmp.9379VSm0qC + rm /tmp/tmp.V5YphHlapj /tmp/tmp.9379VSm0qC + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_OUT=/tmp/tmp.DObK1Vjaj0 + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2207-89209ce1#' ++ mktemp + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-4487~ + local LAST_ERR=/tmp/tmp.EYFj6gtSU6 + local exit_status=0 + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ seq 0 2 + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e 's#apply:.*#apply: Never#' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.DObK1Vjaj0 perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.EYFj6gtSU6 + rm /tmp/tmp.DObK1Vjaj0 /tmp/tmp.EYFj6gtSU6 + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp 
+++ local LAST_OUT=/tmp/tmp.bFLhi9zZzQ ++++ mktemp +++ local LAST_ERR=/tmp/tmp.vy40np1mJC +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.bFLhi9zZzQ +++ cat /tmp/tmp.vy40np1mJC +++ rm /tmp/tmp.bFLhi9zZzQ /tmp/tmp.vy40np1mJC +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.BZFDCOWaAW ++++ mktemp +++ local LAST_ERR=/tmp/tmp.gTeEe4nsp4 +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.BZFDCOWaAW +++ cat /tmp/tmp.gTeEe4nsp4 +++ rm /tmp/tmp.BZFDCOWaAW /tmp/tmp.gTeEe4nsp4 +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-4487 ++ mktemp + local LAST_OUT=/tmp/tmp.ALpqpCA3d0 ++ mktemp + local LAST_ERR=/tmp/tmp.GjOqMgRVIy + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-4487 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-4487 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-4487 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.ALpqpCA3d0 + cat /tmp/tmp.GjOqMgRVIy error: no matching resources found + rm /tmp/tmp.ALpqpCA3d0 /tmp/tmp.GjOqMgRVIy + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo 
some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.m3SMlg5Mxg +++ mktemp ++ local LAST_ERR=/tmp/tmp.HWyGCTbTkf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.m3SMlg5Mxg ++ cat /tmp/tmp.HWyGCTbTkf ++ rm /tmp/tmp.m3SMlg5Mxg /tmp/tmp.HWyGCTbTkf ++ return 0 + local 'root_pass=[3@2IJ=l9W7#hfSF' + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9ZGKyTyipX +++ mktemp ++ local LAST_ERR=/tmp/tmp.vI82piuf7z ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9ZGKyTyipX ++ cat /tmp/tmp.vI82piuf7z Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.9ZGKyTyipX /tmp/tmp.vI82piuf7z ++ return 0 + [[ -n '' ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''[3@2IJ=l9W7#hfSF'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -p'\''[3@2IJ=l9W7#hfSF'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6heoKBMTr8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.0lYpy2yD3E ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6heoKBMTr8 ++ cat /tmp/tmp.0lYpy2yD3E ++ rm /tmp/tmp.6heoKBMTr8 /tmp/tmp.0lYpy2yD3E 
++ return 0 + client_pod=pxc-client-857d976497-jcvp4 + wait_pod pxc-client-857d976497-jcvp4 + local pod=pxc-client-857d976497-jcvp4 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-jcvp4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-jcvp4 condition met waiting for pod/pxc-client-857d976497-jcvp4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''[3@2IJ=l9W7#hfSF'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -p'\''[3@2IJ=l9W7#hfSF'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MJvzX2anDP +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ihdsq6pkE9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MJvzX2anDP ++ cat /tmp/tmp.Ihdsq6pkE9 ++ rm /tmp/tmp.MJvzX2anDP /tmp/tmp.Ihdsq6pkE9 ++ return 0 + client_pod=pxc-client-857d976497-jcvp4 + wait_pod pxc-client-857d976497-jcvp4 + local pod=pxc-client-857d976497-jcvp4 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-jcvp4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-jcvp4 condition met waiting for pod/pxc-client-857d976497-jcvp4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''[3@2IJ=l9W7#hfSF'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''[3@2IJ=l9W7#hfSF'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''[3@2IJ=l9W7#hfSF'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''[3@2IJ=l9W7#hfSF'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8RdGAv3k7v +++ mktemp ++ local LAST_ERR=/tmp/tmp.h3tlmKqyHM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8RdGAv3k7v ++ cat /tmp/tmp.h3tlmKqyHM ++ rm /tmp/tmp.8RdGAv3k7v /tmp/tmp.h3tlmKqyHM ++ return 0 + client_pod=pxc-client-857d976497-jcvp4 + wait_pod pxc-client-857d976497-jcvp4 + local pod=pxc-client-857d976497-jcvp4 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-jcvp4 ++ /usr/bin/sed 
-E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-jcvp4 condition met waiting for pod/pxc-client-857d976497-jcvp4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.aKabmIgnk5/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-1.sql /tmp/tmp.aKabmIgnk5/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''[3@2IJ=l9W7#hfSF'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''[3@2IJ=l9W7#hfSF'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''[3@2IJ=l9W7#hfSF'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''[3@2IJ=l9W7#hfSF'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.n8Ry7VsiEd +++ mktemp ++ local LAST_ERR=/tmp/tmp.OBzj2dd7Qz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.n8Ry7VsiEd ++ cat /tmp/tmp.OBzj2dd7Qz ++ rm /tmp/tmp.n8Ry7VsiEd /tmp/tmp.OBzj2dd7Qz ++ return 0 + client_pod=pxc-client-857d976497-jcvp4 + wait_pod pxc-client-857d976497-jcvp4 + local pod=pxc-client-857d976497-jcvp4 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-jcvp4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-jcvp4 condition met waiting for pod/pxc-client-857d976497-jcvp4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.aKabmIgnk5/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-1.sql /tmp/tmp.aKabmIgnk5/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''[3@2IJ=l9W7#hfSF'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''[3@2IJ=l9W7#hfSF'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''[3@2IJ=l9W7#hfSF'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''[3@2IJ=l9W7#hfSF'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QAlvq9ZJcO +++ mktemp ++ local LAST_ERR=/tmp/tmp.3BoKpAME2L ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QAlvq9ZJcO ++ cat /tmp/tmp.3BoKpAME2L ++ rm /tmp/tmp.QAlvq9ZJcO /tmp/tmp.3BoKpAME2L ++ return 0 + client_pod=pxc-client-857d976497-jcvp4 + wait_pod pxc-client-857d976497-jcvp4 + local pod=pxc-client-857d976497-jcvp4 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-jcvp4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-jcvp4 condition met waiting for pod/pxc-client-857d976497-jcvp4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.aKabmIgnk5/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-1.sql /tmp/tmp.aKabmIgnk5/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iKHBAdBLXE +++ mktemp ++ local LAST_ERR=/tmp/tmp.BjNU7xPo8T ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iKHBAdBLXE ++ cat /tmp/tmp.BjNU7xPo8T Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.iKHBAdBLXE /tmp/tmp.BjNU7xPo8T ++ return 0 + '[' '' ']' + desc 'test missing passwords were created and present in internal secrets' + set +o xtrace ----------------------------------------------------------------------------------- test missing passwords were created and present in internal secrets ----------------------------------------------------------------------------------- + empty_pwds=() + wrong_pwds=() + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking root' Checking root ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.K80f1RL23X +++ mktemp ++ local LAST_ERR=/tmp/tmp.fJeIyIjhcd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.K80f1RL23X ++ cat /tmp/tmp.fJeIyIjhcd ++ rm /tmp/tmp.K80f1RL23X /tmp/tmp.fJeIyIjhcd ++ return 0 + secret_pass='[3@2IJ=l9W7#hfSF' ++ getSecretData internal-some-name root ++ local secretName=internal-some-name ++ local dataKey=root ++ kubectl_bin get secrets/internal-some-name '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.Qut3qzKK0n +++ mktemp ++ local LAST_ERR=/tmp/tmp.wmbcnlBxpH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Qut3qzKK0n ++ cat /tmp/tmp.wmbcnlBxpH ++ rm /tmp/tmp.Qut3qzKK0n /tmp/tmp.wmbcnlBxpH ++ return 0 + int_secret_pass='[3@2IJ=l9W7#hfSF' + [[ -z [3@2IJ=l9W7#hfSF ]] + [[ [3@2IJ=l9W7#hfSF != \[\3\@\2\I\J\=\l\9\W\7\#\h\f\S\F ]] + [[ root != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ root ]] + [[ '' =~ root ]] + echo 'Running compare for root' Running compare for root + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''[3@2IJ=l9W7#hfSF'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''[3@2IJ=l9W7#hfSF'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql 
-uroot -p'\''[3@2IJ=l9W7#hfSF'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''[3@2IJ=l9W7#hfSF'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mS8M3UTioM +++ mktemp ++ local LAST_ERR=/tmp/tmp.j4Zo7hP8gP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mS8M3UTioM ++ cat /tmp/tmp.j4Zo7hP8gP ++ rm /tmp/tmp.mS8M3UTioM /tmp/tmp.j4Zo7hP8gP ++ return 0 + client_pod=pxc-client-857d976497-jcvp4 + wait_pod pxc-client-857d976497-jcvp4 + local pod=pxc-client-857d976497-jcvp4 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-jcvp4 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-857d976497-jcvp4 condition met waiting for pod/pxc-client-857d976497-jcvp4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.aKabmIgnk5/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-4.sql /tmp/tmp.aKabmIgnk5/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking xtrabackup' Checking xtrabackup ++ getSecretData my-cluster-secrets xtrabackup ++ local secretName=my-cluster-secrets ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.Bfd2bEnPwD +++ mktemp ++ local LAST_ERR=/tmp/tmp.OvFVehoKYE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Bfd2bEnPwD ++ cat /tmp/tmp.OvFVehoKYE ++ rm /tmp/tmp.Bfd2bEnPwD /tmp/tmp.OvFVehoKYE ++ return 0 + secret_pass='V_xWJClt.4ZS'\'';' '-h some-name-pxc -uroot -p'\''.4ZS'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''.4ZS'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''.4ZS'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LdOidKrcMM +++ mktemp ++ local LAST_ERR=/tmp/tmp.IqxI0lr8wq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LdOidKrcMM ++ cat /tmp/tmp.IqxI0lr8wq ++ rm /tmp/tmp.LdOidKrcMM /tmp/tmp.IqxI0lr8wq ++ return 0 + client_pod=pxc-client-857d976497-jcvp4 + wait_pod pxc-client-857d976497-jcvp4 + local pod=pxc-client-857d976497-jcvp4 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-jcvp4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-jcvp4 condition met waiting for pod/pxc-client-857d976497-jcvp4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''.4ZS'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''.4ZS'\''' + local postfix= 
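
The per-user password check running here compares each key in my-cluster-secrets against the operator's internal-some-name copy, both read through getSecretData. The helper is fully visible in the trace; the loop body is partially inferred:

    # getSecretData as used throughout this log: pull one key from a secret
    # and base64-decode it (matches the kubectl template shown above).
    getSecretData() {
        local secretName=$1
        local dataKey=$2
        kubectl_bin get "secrets/$secretName" "--template={{.data.$dataKey}}" | base64 --decode
    }

    # Assumed shape of the loop driving the "Checking <user>" lines.
    empty_pwds=()
    wrong_pwds=()
    for user in root xtrabackup monitor proxyadmin operator replication; do
        echo "Checking $user"
        secret_pass=$(getSecretData my-cluster-secrets "$user")
        int_secret_pass=$(getSecretData internal-some-name "$user")
        [[ -z $secret_pass ]] && empty_pwds+=("$user")
        [[ $secret_pass != "$int_secret_pass" ]] && wrong_pwds+=("$user")
    done
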
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''.4ZS'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''.4ZS'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XbjVhsu9ni +++ mktemp ++ local LAST_ERR=/tmp/tmp.BCFTgCCDYK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XbjVhsu9ni ++ cat /tmp/tmp.BCFTgCCDYK ++ rm /tmp/tmp.XbjVhsu9ni /tmp/tmp.BCFTgCCDYK ++ return 0 + client_pod=pxc-client-857d976497-jcvp4 + wait_pod pxc-client-857d976497-jcvp4 + local pod=pxc-client-857d976497-jcvp4 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-jcvp4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-jcvp4 condition met waiting for pod/pxc-client-857d976497-jcvp4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.aKabmIgnk5/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-4.sql /tmp/tmp.aKabmIgnk5/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.a5Bz5NEQQs +++ mktemp ++ local LAST_ERR=/tmp/tmp.3HEOwQd6f8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.a5Bz5NEQQs ++ cat /tmp/tmp.3HEOwQd6f8 ++ rm /tmp/tmp.a5Bz5NEQQs /tmp/tmp.3HEOwQd6f8 ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.LZOkBW6qvM ++ mktemp + local LAST_ERR=/tmp/tmp.azXE38eTuc + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LZOkBW6qvM secret/my-cluster-secrets-2 configured + cat /tmp/tmp.azXE38eTuc Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
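
compare_mysql_cmd, which recurs throughout this log, runs a query through the pxc-client pod and diffs the result against a checked-in .sql fixture, preferring a version-specific file (e.g. select-4-57.sql for PXC 5.7) when one exists. A sketch; TEST_DIR, TMP_DIR, and IMAGE_PXC are stand-ins for the paths and image seen in the trace:

    # Reconstruction of compare_mysql_cmd; run_mysql is the harness helper
    # traced above, and the variable names here are assumptions.
    compare_mysql_cmd() {
        local command_id=$1 command=$2 uri=$3
        local expected_result=$TEST_DIR/compare/${command_id}.sql
        # prefer a version-specific fixture when the image is 5.7
        if [[ $IMAGE_PXC =~ 5\.7 ]] && [[ -f $TEST_DIR/compare/${command_id}-57.sql ]]; then
            expected_result=$TEST_DIR/compare/${command_id}-57.sql
        fi
        run_mysql "$command" "$uri" >"$TMP_DIR/${command_id}.sql"
        if [ ! -s "$TMP_DIR/${command_id}.sql" ]; then
            return 1                 # no output captured (assumed handling)
        fi
        diff -u "$expected_result" "$TMP_DIR/${command_id}.sql"
    }
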
+ rm /tmp/tmp.LZOkBW6qvM /tmp/tmp.azXE38eTuc + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zK5ipwODEN +++ mktemp ++ local LAST_ERR=/tmp/tmp.iifAH7CHRh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zK5ipwODEN ++ cat /tmp/tmp.iifAH7CHRh ++ rm /tmp/tmp.zK5ipwODEN /tmp/tmp.iifAH7CHRh ++ return 0 + client_pod=pxc-client-857d976497-jcvp4 + wait_pod pxc-client-857d976497-jcvp4 + local pod=pxc-client-857d976497-jcvp4 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-jcvp4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-jcvp4 condition met waiting for pod/pxc-client-857d976497-jcvp4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.aKabmIgnk5/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-4.sql /tmp/tmp.aKabmIgnk5/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp + local LAST_OUT=/tmp/tmp.9aGmVd8q5V + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' ++ mktemp + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-4487~ + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2207-89209ce1#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + local LAST_ERR=/tmp/tmp.gaFojtbrb3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.9aGmVd8q5V perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.gaFojtbrb3 + rm /tmp/tmp.9aGmVd8q5V /tmp/tmp.gaFojtbrb3 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w6BqzpnvUi +++ mktemp ++ local LAST_ERR=/tmp/tmp.DjXMYhfZeM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.w6BqzpnvUi ++ cat /tmp/tmp.DjXMYhfZeM ++ rm /tmp/tmp.w6BqzpnvUi /tmp/tmp.DjXMYhfZeM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
. [42 poll iterations (i=0 through 41) elided: each prints a dot, sleeps 5s, increments i, and re-reads .status.state through kubectl_bin, finding it still "initializing"]
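Annotation: the dots above come from a bounded poll: wait_cluster_consistency re-reads .status.state every 5 seconds, up to max=300 attempts. A minimal sketch of that loop, assuming plain kubectl access (the real suite routes every call through the kubectl_bin mktemp/retry wrapper, and additionally compares .status.pxc.ready and the proxy's ready count against the expected sizes, as the trace after this sketch shows):

# Minimal sketch of the readiness poll traced above; kubectl_bin's
# mktemp/retry plumbing is omitted for clarity.
wait_cluster_consistency() {
    local cluster=$1
    local i=0 max=300
    echo -n "waiting for pxc/$cluster to be ready"
    until [[ "$(kubectl get pxc "$cluster" -o 'jsonpath={.status.state}')" == "ready" ]]; do
        [[ $i -ge $max ]] && { echo " timeout" >&2; return 1; }
        echo -n .
        sleep 5
        let i+=1
    done
    echo " ok"
}

wait_cluster_consistency some-name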
.+ sleep 5 + [[ 42 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oSITpbbWU7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ih8DY1nEyu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oSITpbbWU7 ++ cat /tmp/tmp.Ih8DY1nEyu ++ rm /tmp/tmp.oSITpbbWU7 /tmp/tmp.Ih8DY1nEyu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 43 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Get7Cv2e3I +++ mktemp ++ local LAST_ERR=/tmp/tmp.jmRedxAFpR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Get7Cv2e3I ++ cat /tmp/tmp.jmRedxAFpR ++ rm /tmp/tmp.Get7Cv2e3I /tmp/tmp.jmRedxAFpR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 44 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DtPAoruALi +++ mktemp ++ local LAST_ERR=/tmp/tmp.eX5jCoPsap ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DtPAoruALi ++ cat /tmp/tmp.eX5jCoPsap ++ rm /tmp/tmp.DtPAoruALi /tmp/tmp.eX5jCoPsap ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 45 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xtrc1PwGZ0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.6amSjRRu7L ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xtrc1PwGZ0 ++ cat /tmp/tmp.6amSjRRu7L ++ rm /tmp/tmp.xtrc1PwGZ0 /tmp/tmp.6amSjRRu7L ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rqSTxPXCEb +++ mktemp ++ local LAST_ERR=/tmp/tmp.ePheqajG2Y ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rqSTxPXCEb ++ cat /tmp/tmp.ePheqajG2Y ++ rm /tmp/tmp.rqSTxPXCEb /tmp/tmp.ePheqajG2Y ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.iWZ6Er0IhD ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.53cUAVyeYc +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.iWZ6Er0IhD +++++ cat /tmp/tmp.53cUAVyeYc +++++ rm /tmp/tmp.iWZ6Er0IhD /tmp/tmp.53cUAVyeYc +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7PI1tzp9pu +++ 
mktemp ++ local LAST_ERR=/tmp/tmp.D1fSHLRZ7z ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7PI1tzp9pu ++ cat /tmp/tmp.D1fSHLRZ7z ++ rm /tmp/tmp.7PI1tzp9pu /tmp/tmp.D1fSHLRZ7z ++ return 0 + [[ 3 == \3 ]] + echo + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + check_generation 2 haproxy some-name + local generation=2 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zDf6BustiH +++ mktemp ++ local LAST_ERR=/tmp/tmp.Tv1FAfDh8D ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zDf6BustiH ++ cat /tmp/tmp.Tv1FAfDh8D ++ rm /tmp/tmp.zDf6BustiH /tmp/tmp.Tv1FAfDh8D ++ return 0 + current_generation=2 + [[ 2 != \2 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.a05VyhfK1Y ++ mktemp + local LAST_ERR=/tmp/tmp.xX6x4dtuba + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.a05VyhfK1Y secret/my-cluster-secrets patched + cat /tmp/tmp.xX6x4dtuba + rm /tmp/tmp.a05VyhfK1Y /tmp/tmp.xX6x4dtuba + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eg7LAG4VGL +++ mktemp ++ local LAST_ERR=/tmp/tmp.AUFwtXwj5Z ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eg7LAG4VGL ++ cat /tmp/tmp.AUFwtXwj5Z ++ rm /tmp/tmp.eg7LAG4VGL /tmp/tmp.AUFwtXwj5Z ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PjKLZYeGOY +++ mktemp ++ local LAST_ERR=/tmp/tmp.Hhm91oiL56 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PjKLZYeGOY ++ cat /tmp/tmp.Hhm91oiL56 ++ rm /tmp/tmp.PjKLZYeGOY /tmp/tmp.Hhm91oiL56 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
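Annotation: two helpers appear in this stretch of the trace: check_generation, which asserts the StatefulSet's .metadata.generation, and patch_secret, which base64-encodes the new password and patches a single key of the Secret's .data. Standalone equivalents (secret, key, and password values taken from the trace; the function bodies are simplifications):

# Standalone equivalents of the check_generation and patch_secret calls traced above.
check_generation() {
    local expected=$1 sts=$2
    local current
    current=$(kubectl get statefulset "$sts" -o 'jsonpath={.metadata.generation}')
    [[ $current == "$expected" ]] || { echo "generation $current != $expected" >&2; return 1; }
}

check_generation 2 some-name-haproxy

newpass='test-password2'
encoded=$(echo -n "$newpass" | base64)   # dGVzdC1wYXNzd29yZDI=
kubectl patch secret my-cluster-secrets -p="{\"data\":{\"monitor\": \"$encoded\"}}"
# The operator reacts to the changed Secret by rotating the monitor user's
# password, which is why the trace then re-enters wait_cluster_consistency.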
. [6 poll iterations (i=1 through 6) elided: same 5s sleep and .status.state check, state still "initializing"]
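Annotation: once the cluster reports ready again, the trace below verifies the new password end to end with compare_mysql_cmd: run a statement through the pxc-client pod and diff the output against a stored .sql fixture. A simplified sketch of that flow (run_mysql's real body also waits for the client pod to become Ready; the exec here is condensed):

# Simplified sketch of the run_mysql/compare_mysql_cmd flow traced below.
run_mysql() {
    local command=$1 uri=$2
    local client_pod
    client_pod=$(kubectl get pods --selector=name=pxc-client \
        -o 'jsonpath={.items[].metadata.name}')
    kubectl exec "$client_pod" -- sh -c "echo \"$command\" | mysql -sN $uri"
}

run_mysql 'SHOW DATABASES;' "-h some-name-haproxy -umonitor -p'test-password2'" \
    > /tmp/select-3.sql
diff -u e2e-tests/users/compare/select-3.sql /tmp/select-3.sql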
.+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ovgB62f3Ci +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZFprkdj5o5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ovgB62f3Ci ++ cat /tmp/tmp.ZFprkdj5o5 ++ rm /tmp/tmp.ovgB62f3Ci /tmp/tmp.ZFprkdj5o5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AZQSFDPNyG +++ mktemp ++ local LAST_ERR=/tmp/tmp.QRrnXZn1gp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AZQSFDPNyG ++ cat /tmp/tmp.QRrnXZn1gp ++ rm /tmp/tmp.AZQSFDPNyG /tmp/tmp.QRrnXZn1gp ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BdzyhrWUtP +++ mktemp ++ local LAST_ERR=/tmp/tmp.7BeMABbSjm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BdzyhrWUtP ++ cat /tmp/tmp.7BeMABbSjm ++ rm /tmp/tmp.BdzyhrWUtP /tmp/tmp.7BeMABbSjm ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.pl3yMGnpwC ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.sLFK5NLS8n +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.pl3yMGnpwC +++++ cat /tmp/tmp.sLFK5NLS8n +++++ rm /tmp/tmp.pl3yMGnpwC /tmp/tmp.sLFK5NLS8n +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Mo8kCFGlso +++ mktemp ++ local LAST_ERR=/tmp/tmp.3eue0881GO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Mo8kCFGlso ++ cat /tmp/tmp.3eue0881GO ++ rm /tmp/tmp.Mo8kCFGlso /tmp/tmp.3eue0881GO ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-3-57.sql ]] + run_mysql 'SHOW DATABASES;' '-h 
some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ojMc1YCldZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.HEL4DuHFSt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ojMc1YCldZ ++ cat /tmp/tmp.HEL4DuHFSt ++ rm /tmp/tmp.ojMc1YCldZ /tmp/tmp.HEL4DuHFSt ++ return 0 + client_pod=pxc-client-857d976497-jcvp4 + wait_pod pxc-client-857d976497-jcvp4 + local pod=pxc-client-857d976497-jcvp4 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-jcvp4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-jcvp4 condition met waiting for pod/pxc-client-857d976497-jcvp4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.aKabmIgnk5/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2207/e2e-tests/users/compare/select-3.sql /tmp/tmp.aKabmIgnk5/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + check_generation 3 haproxy some-name + local generation=3 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.F1MLGar9zd +++ mktemp ++ local LAST_ERR=/tmp/tmp.sTk0MZp3fX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.F1MLGar9zd ++ cat /tmp/tmp.sTk0MZp3fX ++ rm /tmp/tmp.F1MLGar9zd /tmp/tmp.sTk0MZp3fX ++ return 0 + current_generation=3 + [[ 3 != \3 ]] + destroy users-4487 + local namespace=users-4487 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v 'the object has been modified' ++ get_operator_pod + grep -v 'get backup status: Job.batch' ++ local label_prefix=app.kubernetes.io/ + grep -v level=info + sort -u + tee /tmp/tmp.aKabmIgnk5/operator.log + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.IMQxRle8cb +++ mktemp ++ local LAST_ERR=/tmp/tmp.meuPNVpLWi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IMQxRle8cb ++ cat /tmp/tmp.meuPNVpLWi ++ rm 
/tmp/tmp.IMQxRle8cb /tmp/tmp.meuPNVpLWi ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-58dd9fd94c-xtqwz ++ mktemp + local LAST_OUT=/tmp/tmp.vEYiPojqsW ++ mktemp + local LAST_ERR=/tmp/tmp.h9xj16ZOm3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-58dd9fd94c-xtqwz + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.vEYiPojqsW + cat /tmp/tmp.h9xj16ZOm3 + rm /tmp/tmp.vEYiPojqsW /tmp/tmp.h9xj16ZOm3 + return 0 [unrecoverable fragments of a truncated object diff from the operator log elided] 2025-11-06T14:03:57.510Z INFO setup Manager starting up {"gitCommit": "89209ce179be0afd4246cdcc6b564d43f706c45f", "gitBranch": "PR-2207-89209ce1", "buildTime": "2025-11-06T11:54:31Z", "goVersion": "go1.25.4", "os": "linux", "arch": "amd64"} 2025-11-06T14:03:57.510Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.13-gke.1139000"} 2025-11-06T14:03:57.513Z INFO setup Registering Components. 2025-11-06T14:03:58.191Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-11-06T14:03:58.191Z INFO controller-runtime.metrics Starting metrics server 2025-11-06T14:03:58.191Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-11-06T14:03:58.191Z INFO controller-runtime.webhook Starting webhook server 2025-11-06T14:03:58.191Z INFO setup Starting the Cmd. 2025-11-06T14:03:58.191Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-11-06T14:03:58.192Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-11-06T14:03:58.192Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-11-06T14:03:58.192Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-11-06T14:03:58.293Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com...
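Annotation: the operator log dump that follows is not raw: the destroy step above pipes it through grep/sed filters, de-duplicates it with sort -u, and tees it to a file. A standalone sketch of that pipeline (filters copied from the trace; the stage order is approximated from the interleaved trace). sort -u reorders lines, which is likely why the multi-line object diff elided above survived only as fragments:

# Sketch of the operator-log sanitization pipeline from the destroy step.
operator_pod=percona-xtradb-cluster-operator-58dd9fd94c-xtqwz   # from the trace
kubectl logs -n pxc-operator "$operator_pod" \
    | grep -v 'the object has been modified' \
    | grep -v 'get backup status: Job.batch' \
    | grep -v level=info \
    | sort -u \
    | sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
    | tee /tmp/operator.log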
2025-11-06T14:03:58.324Z DEBUG events percona-xtradb-cluster-operator-58dd9fd94c-xtqwz_ae35caac-b4a6-4d37-9d23-6c3477961faf became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"a96e23dc-a0de-4624-832d-2a2f891c25c0","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1762437838316303009"}, "reason": "LeaderElection"} 2025-11-06T14:03:58.324Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2025-11-06T14:03:58.325Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-11-06T14:03:58.325Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-11-06T14:03:58.325Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.Secret"} 2025-11-06T14:03:58.325Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-11-06T14:03:58.425Z INFO Starting Controller {"controller": "pxc-controller"} 2025-11-06T14:03:58.425Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2025-11-06T14:03:58.425Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2025-11-06T14:03:58.425Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2025-11-06T14:03:58.427Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2025-11-06T14:03:58.427Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2025-11-06T14:04:33.843Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "b34a391c-f24c-43d4-9dda-5659fbd904ca", "version": "1.19.0"} 2025-11-06T14:04:34.081Z INFO User secrets updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "b34a391c-f24c-43d4-9dda-5659fbd904ca", "secrets": "my-cluster-secrets"} 2025-11-06T14:04:34.299Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "b34a391c-f24c-43d4-9dda-5659fbd904ca", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-11-06T14:04:34.313Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "b34a391c-f24c-43d4-9dda-5659fbd904ca", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-11-06T14:04:34.874Z ERROR Reconciler error {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "b34a391c-f24c-43d4-9dda-5659fbd904ca", "error": "failed to deploy proxysql: updatePod for proxysql: reconcile config: reconcile autotune config: create or update configmap: configmaps \"auto-some-name-pxc\" already exists", "errorVerbose": "configmaps \"auto-some-name-pxc\" already exists\ncreate or update 
configmap\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileAutotuneConfigMap\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:92\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:25\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nreconcile autotune 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:27\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nreconcile 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:54\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nupdatePod for proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:578\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nfailed to deploy 
proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:596\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-06T14:04:34.995Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "88143d17-7fe5-4593-b4e7-1a9e7d8c7268", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-06T14:04:35.063Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "88143d17-7fe5-4593-b4e7-1a9e7d8c7268", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-06T14:04:35.130Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "88143d17-7fe5-4593-b4e7-1a9e7d8c7268", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-06T14:04:35.178Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "88143d17-7fe5-4593-b4e7-1a9e7d8c7268", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-06T14:04:35.267Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "88143d17-7fe5-4593-b4e7-1a9e7d8c7268", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-06T14:04:35.363Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "88143d17-7fe5-4593-b4e7-1a9e7d8c7268", "object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-06T14:04:36.259Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "514ef768-8126-46d2-9480-df971f3d5370", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-11-06T14:04:36.284Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "514ef768-8126-46d2-9480-df971f3d5370", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-11-06T14:05:52.659Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-4487", "name": 
"some-name", "reconcileID": "3bad9e81-eda1-49e1-83f8-c85bc4967e01", "user": "operator"} 2025-11-06T14:05:52.690Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "3bad9e81-eda1-49e1-83f8-c85bc4967e01", "user": "monitor"} 2025-11-06T14:05:52.724Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "3bad9e81-eda1-49e1-83f8-c85bc4967e01"} 2025-11-06T14:05:52.773Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "3bad9e81-eda1-49e1-83f8-c85bc4967e01", "user": "xtrabackup"} 2025-11-06T14:05:52.804Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "3bad9e81-eda1-49e1-83f8-c85bc4967e01"} 2025-11-06T14:05:52.812Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "3bad9e81-eda1-49e1-83f8-c85bc4967e01", "err": "get primary pxc pod: not found"} 2025-11-06T14:05:57.639Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "8ed076ac-aeb2-4f2b-9df6-2fbbcb12026e", "err": "get primary pxc pod: not found"} 2025-11-06T14:06:02.769Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "a312ef11-82e1-4c7f-bc2d-945f25f7f301", "err": "get primary pxc pod: not found"} 2025-11-06T14:08:18.765Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "12f7afe6-709d-4822-9a9b-968abfb10444", "user": "root"} 2025-11-06T14:08:18.801Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "12f7afe6-709d-4822-9a9b-968abfb10444", "user": "replication"} 2025-11-06T14:08:18.849Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "12f7afe6-709d-4822-9a9b-968abfb10444", "new version": "5.7.44-48-57"} 2025-11-06T14:08:20.624Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "12f7afe6-709d-4822-9a9b-968abfb10444"} 2025-11-06T14:08:25.329Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "cf58c966-8681-4286-ac6f-a752c5d6fce7"} 2025-11-06T14:08:30.842Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "b8fe4dd0-e767-48b9-ae2b-19af4ffb04d6"} 2025-11-06T14:08:35.937Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "1c79cd2d-fc17-460b-899d-b68126ed82a1"} 2025-11-06T14:08:41.134Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "857881df-044c-4f13-afcb-fab249182500"} 2025-11-06T14:08:46.753Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "a66e5e5b-616b-4c46-993a-defa1a3b6184"} 2025-11-06T14:08:51.654Z DEBUG PXC users synced with 
ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "f6edecb3-60c9-42e6-8bc3-930f3e3a933b"} 2025-11-06T14:08:56.823Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "792c9d29-be2a-43b3-8d3f-99b5680f5d33"} 2025-11-06T14:09:02.519Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "a3363cef-db47-4f4c-98d0-1c5cda434d77"} 2025-11-06T14:09:07.926Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6b76e450-5564-4bea-890f-08138acf30c3"} 2025-11-06T14:09:13.416Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "32f85a0f-4429-4dcd-ab46-204df47668b8"} 2025-11-06T14:09:18.538Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "9e969e5b-f935-48a5-b583-b8b6c2713769"} 2025-11-06T14:09:23.840Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "df4b19f8-969f-42d7-a0d5-c858dad813a9"} 2025-11-06T14:09:29.029Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "f3556b6c-cbdf-454e-ad59-bc9fb7c7fac8"} 2025-11-06T14:09:34.254Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "5997ac7a-5224-470d-abf3-66cc70740948"} 2025-11-06T14:09:39.306Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "589f71bb-53e4-40d9-9f8b-0e787a6609b7"} 2025-11-06T14:09:44.715Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "d0af1eb7-9594-4ea5-a807-b452db892adb"} 2025-11-06T14:09:50.024Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "88aeee35-6088-46c5-8e93-ad732f0d8a24"} 2025-11-06T14:09:55.261Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "3955badb-aa26-49fa-8426-c7749395709c"} 2025-11-06T14:10:00.491Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "316834c6-9007-46c1-a333-d3e3a4d83eca"} 2025-11-06T14:10:05.525Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "8576cd37-5141-4b76-8a09-e8c1e90fe206"} 2025-11-06T14:10:10.176Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "c22ec746-2f19-4aca-a822-9d158a818faf", "user": "root"} 2025-11-06T14:10:10.191Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "c22ec746-2f19-4aca-a822-9d158a818faf", "user": "root"} 2025-11-06T14:10:10.211Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "c22ec746-2f19-4aca-a822-9d158a818faf", "secret": 
"some-name-mysql-init", "user": "root"} 2025-11-06T14:10:10.939Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "7a2bb15f-6aa4-46bb-a874-c5174f0b57f9"} 2025-11-06T14:10:12.435Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "c22ec746-2f19-4aca-a822-9d158a818faf"} 2025-11-06T14:10:12.480Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "c22ec746-2f19-4aca-a822-9d158a818faf", "user": "root"} 2025-11-06T14:10:14.346Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "c22ec746-2f19-4aca-a822-9d158a818faf"} 2025-11-06T14:10:16.520Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "7662e446-f6de-429e-8ead-284d3da3860d"} 2025-11-06T14:10:21.442Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "7a5b3028-9f62-4024-9840-dccdfb33241b"} 2025-11-06T14:10:26.633Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "fc60ae59-c139-45e6-a9dc-7142770ec998"} 2025-11-06T14:10:31.483Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "0d001b6e-7a4f-4664-ab40-71d1c5cfc0fe", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-06T14:10:31.538Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "0d001b6e-7a4f-4664-ab40-71d1c5cfc0fe", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-06T14:10:32.749Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "1d1c2fd5-5872-4d6e-a0a0-56285866cfc9"} 2025-11-06T14:10:57.170Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "db31ecc9-f644-4663-8b45-9a97de702de2", "user": "proxyadmin"} 2025-11-06T14:10:57.170Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "db31ecc9-f644-4663-8b45-9a97de702de2", "user": "proxyadmin"} 2025-11-06T14:10:57.175Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "8f0aaee9-c87a-41cb-b525-78be6ecc8d82", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with 
exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-06T14:10:57.200Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "db31ecc9-f644-4663-8b45-9a97de702de2", "user": "proxyadmin"} 2025-11-06T14:10:57.224Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "db31ecc9-f644-4663-8b45-9a97de702de2", "user": "proxyadmin"} 2025-11-06T14:10:57.224Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "db31ecc9-f644-4663-8b45-9a97de702de2", "last-applied-secret": "381187c3bdc5d1bc76ee9ddbb425cde62aa68adca9d2f1f26b5ad7d700a95bf1"} 2025-11-06T14:10:57.228Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "db31ecc9-f644-4663-8b45-9a97de702de2", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-06T14:10:57.287Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "db31ecc9-f644-4663-8b45-9a97de702de2", "err": "get primary pxc pod: not found"} 2025-11-06T14:10:58.974Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "db31ecc9-f644-4663-8b45-9a97de702de2", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-06T14:11:37.908Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "b2af5875-28f0-452d-8fad-d8c186f964e7"} 2025-11-06T14:11:43.112Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "87d936b8-1bd8-41cc-bb89-8465b5cec09d"} 2025-11-06T14:11:44.363Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "30cb945e-1fbc-4239-8ddc-dd57990d19d1", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-06T14:11:44.412Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "30cb945e-1fbc-4239-8ddc-dd57990d19d1", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-06T14:11:46.381Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "30cb945e-1fbc-4239-8ddc-dd57990d19d1"} 2025-11-06T14:11:46.447Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "480c14d1-308b-454b-b427-e72cbebb140a", "user": "xtrabackup"} 2025-11-06T14:11:46.459Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "480c14d1-308b-454b-b427-e72cbebb140a", "user": "xtrabackup"} 2025-11-06T14:11:46.479Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "480c14d1-308b-454b-b427-e72cbebb140a", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-11-06T14:11:46.521Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "480c14d1-308b-454b-b427-e72cbebb140a", "user": "xtrabackup"} 2025-11-06T14:11:46.521Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "480c14d1-308b-454b-b427-e72cbebb140a", "last-applied-secret": "c3cee91feb24fea8cbf5d9e1b9f4ff2a0ba0f75db07973417a79a5a819e98e27"} 2025-11-06T14:11:46.524Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "480c14d1-308b-454b-b427-e72cbebb140a", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-06T14:11:49.098Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "480c14d1-308b-454b-b427-e72cbebb140a"} 
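[editor's note] The two "sync users" failures above are proxysql-admin, invoked by the operator inside the proxysql container, failing in sequence: first "The cluster (with writer hostgroup:11) has not been configured in ProxySQL" while the Galera hostgroups are still empty, then ERROR 1045 for 'proxyadmin'@'127.0.0.1' because the password was already rotated in the secret while the running pod, pending its StatefulSet restart, still held the old one. A minimal manual check under stated assumptions (the secret name internal-some-name follows the operator's internal-<cluster> convention and is not shown in this log; a mysql client is assumed present in the proxysql image):

NS=users-4487
# Read the rotated proxyadmin password from the operator's internal secret
# (secret name "internal-some-name" is an assumption, not shown in this log).
PROXYADMIN_PASS=$(kubectl -n "$NS" get secret internal-some-name \
  -o jsonpath='{.data.proxyadmin}' | base64 -d)
# Query the ProxySQL admin interface (localhost:6032, as in the error text);
# once proxysql-admin has configured the cluster, backend servers should be
# listed under writer hostgroup 11.
kubectl -n "$NS" exec some-name-proxysql-0 -c proxysql -- \
  mysql -h127.0.0.1 -P6032 -uproxyadmin -p"$PROXYADMIN_PASS" \
  -e 'SELECT hostgroup_id, hostname, status FROM mysql_servers;'

If the first command returns the old password, the pod simply has not been restarted with the new last-applied-secret yet, which matches the "Proxy pods will be restarted" entries above.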
2025-11-06T14:13:30.519Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "58539447-f9fd-474a-b2f2-6be095f0f487", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-4487 on 34.118.224.10:53: no such host"} 2025-11-06T14:13:31.276Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "195a125d-b0a5-490c-8f69-9d75d0b4bd73", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-4487 on 34.118.224.10:53: no such host"} 2025-11-06T14:13:41.513Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e0ba4c0b-373d-40a1-803f-edd115011a0b", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-4487 on 34.118.224.10:53: no such host"} 2025-11-06T14:13:46.689Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "c0f062de-197c-4787-9a4e-6b0fb72d84ff", "primary name": "some-name-pxc-0.some-name-pxc.users-4487.svc.cluster.local"} 2025-11-06T14:13:51.831Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "5035e9fc-38f2-4ea5-a47d-a58081041bee", "primary name": "some-name-pxc-0.some-name-pxc.users-4487.svc.cluster.local"} 2025-11-06T14:13:56.973Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "55c605dc-7d2d-40be-848f-fa393309a6b4", "primary name": "some-name-pxc-0.some-name-pxc.users-4487.svc.cluster.local"} 2025-11-06T14:14:02.126Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e1ef0bfe-dc67-4742-960c-41c6b603406d", "primary name": "some-name-pxc-0.some-name-pxc.users-4487.svc.cluster.local"} 2025-11-06T14:14:12.470Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "a6e24011-18f5-4f2a-a120-bfd182f51a87", "primary name": "some-name-pxc-0.some-name-pxc.users-4487.svc.cluster.local"} 2025-11-06T14:14:20.179Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "5ecd93f4-b919-4f2f-b22b-6792a47629f7"} 2025-11-06T14:14:24.945Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e88d781b-9fcd-40d2-b288-65efc7dbec08", "user": "monitor"} 2025-11-06T14:14:24.957Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e88d781b-9fcd-40d2-b288-65efc7dbec08", "user": "monitor"} 2025-11-06T14:14:24.986Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e88d781b-9fcd-40d2-b288-65efc7dbec08", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-06T14:14:25.029Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e88d781b-9fcd-40d2-b288-65efc7dbec08", "user": "monitor"} 2025-11-06T14:14:25.054Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e88d781b-9fcd-40d2-b288-65efc7dbec08", "user": "monitor"} 2025-11-06T14:14:25.054Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e88d781b-9fcd-40d2-b288-65efc7dbec08", "last-applied-secret": "d59bb29d019e3f0e4d6860deb864450af53640e1ce1e99ca56cbaf1c9467defa"} 2025-11-06T14:14:25.062Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e88d781b-9fcd-40d2-b288-65efc7dbec08", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-06T14:14:26.499Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "93c95798-35aa-4e55-920b-387b314548dc", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-4487.svc.cluster.local:3306) to ProxySQL\nRemoving existing user from ProxySQL: monitor\nAdding user to ProxySQL: monitor\n / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-4487.svc.cluster.local:3306) to ProxySQL\nRemoving existing user from ProxySQL: monitor\nAdding user to ProxySQL: monitor\n / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-06T14:15:15.594Z DEBUG PXC users synced with ProxySQL 
{"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "14df0adf-f92c-42e0-9ea9-c34ffcf66491"} 2025-11-06T14:15:20.167Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "dc373076-63ba-4aaf-9531-4156e6bc007e"} 2025-11-06T14:15:25.607Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "2244e4f1-124d-4008-a1fe-abb5c4e3c2a3"} 2025-11-06T14:15:31.416Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "91d0f816-0483-484d-8e8e-4f774a15e188"} 2025-11-06T14:15:33.643Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "a47d2614-b831-472a-b448-3fe9146c9319", "user": "operator"} 2025-11-06T14:15:33.656Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "a47d2614-b831-472a-b448-3fe9146c9319", "user": "operator"} 2025-11-06T14:15:33.678Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "a47d2614-b831-472a-b448-3fe9146c9319", "secret": "some-name-mysql-init", "user": "operator"} 2025-11-06T14:15:33.699Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "a47d2614-b831-472a-b448-3fe9146c9319", "user": "operator"} 2025-11-06T14:15:33.699Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "a47d2614-b831-472a-b448-3fe9146c9319", "last-applied-secret": "1ce0489e43e4ff42c6b360c482e0a0f63ce438cc7b1f7638f45eab08637eb4c4"} 2025-11-06T14:15:33.702Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "a47d2614-b831-472a-b448-3fe9146c9319", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-06T14:15:37.445Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "a47d2614-b831-472a-b448-3fe9146c9319", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 
'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 
'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-06T14:16:02.071Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "867f225f-af88-4f2e-bc59-aca37c653de5"} 2025-11-06T14:16:06.671Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "f7ff0937-3aed-4255-a79c-99e6c6ff69d4"} 2025-11-06T14:16:11.793Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "35389afe-37ed-48d4-9875-db3430068513"} 2025-11-06T14:16:17.693Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "58c76073-b964-401b-bcea-defc803397df"} 2025-11-06T14:16:20.328Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "secrets": "my-cluster-secrets-2"} 2025-11-06T14:16:20.328Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "user": "root"} 2025-11-06T14:16:20.343Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "user": "root"} 2025-11-06T14:16:20.365Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "secret": "some-name-mysql-init", "user": "root"} 2025-11-06T14:16:22.873Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf"} 2025-11-06T14:16:22.903Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "user": "root"} 2025-11-06T14:16:22.903Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "user": "operator"} 2025-11-06T14:16:22.914Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "user": "operator"} 2025-11-06T14:16:22.938Z INFO MySQL init secret updated {"controller": 
"pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "secret": "some-name-mysql-init", "user": "operator"} 2025-11-06T14:16:22.963Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "user": "operator"} 2025-11-06T14:16:22.963Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "user": "monitor"} 2025-11-06T14:16:22.976Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "user": "monitor"} 2025-11-06T14:16:23.004Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-06T14:16:23.022Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "user": "monitor"} 2025-11-06T14:16:23.051Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "user": "monitor"} 2025-11-06T14:16:23.051Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "user": "xtrabackup"} 2025-11-06T14:16:23.062Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "user": "xtrabackup"} 2025-11-06T14:16:23.082Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-11-06T14:16:23.104Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "user": "xtrabackup"} 2025-11-06T14:16:23.104Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "user": "replication"} 2025-11-06T14:16:23.115Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "user": "replication"} 2025-11-06T14:16:23.137Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "secret": "some-name-mysql-init", "user": "replication"} 2025-11-06T14:16:23.160Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "user": "replication"} 2025-11-06T14:16:23.160Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "user": "proxyadmin"} 2025-11-06T14:16:23.178Z INFO Proxy user updated {"controller": 
"pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "user": "proxyadmin"} 2025-11-06T14:16:23.201Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "user": "proxyadmin"} 2025-11-06T14:16:23.201Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "last-applied-secret": "2ed6decd120d956406bd243d770839aeb06cce7d423668014e66ef8bd0fd6e33"} 2025-11-06T14:16:23.201Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "last-applied-secret": "2ed6decd120d956406bd243d770839aeb06cce7d423668014e66ef8bd0fd6e33"} 2025-11-06T14:16:23.204Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-06T14:16:23.263Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-06T14:16:25.384Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6f4874d0-4487-4b4b-99c3-4269f4f6a2cf", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-06T14:17:58.285Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "83ba707f-df15-4100-b0ba-5e21d3d3aef2", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-4487 on 34.118.224.10:53: no such host"} 2025-11-06T14:17:58.997Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "fde4c402-23d1-490f-aafa-04e8d4330d26", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-4487 on 34.118.224.10:53: no such host"} 2025-11-06T14:18:03.991Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "4fd8a14d-b468-4b9e-8aff-65bb6f3fb28d", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-4487 on 34.118.224.10:53: no such host"} 2025-11-06T14:18:09.251Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "bd7511bf-2f45-4a2a-bb88-59ecd1887764", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-4487 on 34.118.224.10:53: no such host"} 2025-11-06T14:18:14.414Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6dd60f2d-3f7c-478f-8eed-044f5ca03888", "primary name": "some-name-pxc-0.some-name-pxc.users-4487.svc.cluster.local"} 2025-11-06T14:18:19.568Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "5d781154-b479-4666-88f6-23dd4ed9a827", "primary name": "some-name-pxc-0.some-name-pxc.users-4487.svc.cluster.local"} 2025-11-06T14:18:24.721Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "49e5c07a-a006-4143-bafe-5840aa4bf45b", "primary name": "some-name-pxc-0.some-name-pxc.users-4487.svc.cluster.local"} 2025-11-06T14:18:29.903Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "0e878a56-8a56-4c63-ac23-8d54f5b939e8", "primary name": "some-name-pxc-0.some-name-pxc.users-4487.svc.cluster.local"} 2025-11-06T14:18:35.052Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "a697eecc-83d5-4b51-a2c6-9238e64546a7", "primary name": "some-name-pxc-0.some-name-pxc.users-4487.svc.cluster.local"} 2025-11-06T14:18:40.205Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "43d3ef29-dfb2-48ef-8638-ed21bd329ea4", "primary name": "some-name-pxc-0.some-name-pxc.users-4487.svc.cluster.local"} 2025-11-06T14:18:48.204Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "2d5b1f02-73fe-40c5-81da-d615fabb9f0b"} 2025-11-06T14:18:50.755Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "7230a9f9-524b-4efa-99c1-7060b5de12db", "user": "operator"} 2025-11-06T14:18:50.765Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "7230a9f9-524b-4efa-99c1-7060b5de12db", "user": "operator"} 2025-11-06T14:18:50.784Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "7230a9f9-524b-4efa-99c1-7060b5de12db", "secret": "some-name-mysql-init", "user": "operator"} 2025-11-06T14:18:50.805Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "7230a9f9-524b-4efa-99c1-7060b5de12db", "user": "operator"} 2025-11-06T14:18:50.805Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "7230a9f9-524b-4efa-99c1-7060b5de12db", "last-applied-secret": "3b399880f57822ec3c1e24a916fe16d481a94516ef864f66830b6f7875e5410d"} 2025-11-06T14:18:50.808Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "7230a9f9-524b-4efa-99c1-7060b5de12db", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-06T14:18:54.464Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "7230a9f9-524b-4efa-99c1-7060b5de12db", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 
'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 
'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-4487.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-06T14:19:44.709Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "77652f25-af43-45c0-b2d8-bfc9b85ef81d"} 2025-11-06T14:19:49.186Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "8bca1ee5-b2a8-4d08-8fcf-175407b54475"} 2025-11-06T14:19:54.444Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "794fd5b6-fc53-414d-a88d-68b0957d3f0a"} 2025-11-06T14:19:59.770Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "196c5892-6440-48c1-819a-32e465672317"} 2025-11-06T14:20:04.785Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "774f3bac-2e43-406e-9499-9849b198ed4b"} 2025-11-06T14:20:10.268Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "613d4ad4-a6d5-40d1-a057-b9247c206039"} 2025-11-06T14:20:15.358Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "17f3429c-17c9-41fa-ae58-4595f7c746ef"} 2025-11-06T14:20:21.307Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "3401bd7e-89e0-4071-bec2-74dd8cee4c1e"} 
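[editor's note] Two failure modes interleave in the stretch above: "no such host" for some-name-pxc-0.some-name-pxc.users-4487, which is expected while the pod is being recreated and its headless-service DNS record is absent, and ERROR 1045 for the operator user (note the truncated hostname in the error text), where syncusers runs before the rotated password has reached every node. A minimal sketch of the matching manual checks; the secret name my-cluster-secrets-2 is taken from the "Created user secrets" entry above, while busybox for nslookup and a mysql client in the pxc container are assumptions:

NS=users-4487
# Does the per-pod headless-service record resolve yet?
kubectl -n "$NS" run dns-test --rm -it --restart=Never --image=busybox -- \
  nslookup some-name-pxc-0.some-name-pxc."$NS".svc.cluster.local
# Test the rotated operator credential directly against a PXC node.
OPERATOR_PASS=$(kubectl -n "$NS" get secret my-cluster-secrets-2 \
  -o jsonpath='{.data.operator}' | base64 -d)
kubectl -n "$NS" exec some-name-pxc-0 -c pxc -- \
  mysql -uoperator -p"$OPERATOR_PASS" -e 'SELECT @@hostname;'

Both checks succeeding while the log still shows errors would point at the reconcile loop simply lagging the rotation, which is consistent with the "PXC users synced with ProxySQL" entries that follow each error burst.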
2025-11-06T14:20:26.077Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "b7aaaa74-eede-4d7b-a58c-76f3891a1d08"} 2025-11-06T14:20:31.673Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "88f2db3a-c552-4363-8a86-fc99d30414fd"} 2025-11-06T14:20:36.559Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "d3f243d7-d402-4382-a363-522c5c8f39b4"} 2025-11-06T14:20:41.791Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "92425c33-2448-4f81-b5e9-239c62b491a5"} 2025-11-06T14:20:47.183Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "2a72a333-3ceb-4cef-b238-56476df626f8"} 2025-11-06T14:20:52.546Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "1f8c2f6d-7ce4-4dbf-9748-c0419231587f"} 2025-11-06T14:20:57.764Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "0b70a477-e0a4-4640-85aa-53c4359abd37"} 2025-11-06T14:21:02.887Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e4ee189f-8264-402d-878a-6661b7aee60d", "user": "root"} 2025-11-06T14:21:02.903Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e4ee189f-8264-402d-878a-6661b7aee60d", "user": "root"} 2025-11-06T14:21:02.926Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e4ee189f-8264-402d-878a-6661b7aee60d", "secret": "some-name-mysql-init", "user": "root"} 2025-11-06T14:21:03.084Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "af3827a9-5f7c-4abf-8fa4-824b16419cb6"} 2025-11-06T14:21:05.400Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e4ee189f-8264-402d-878a-6661b7aee60d"} 2025-11-06T14:21:05.418Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e4ee189f-8264-402d-878a-6661b7aee60d", "user": "root"} 2025-11-06T14:21:05.418Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e4ee189f-8264-402d-878a-6661b7aee60d", "user": "monitor"} 2025-11-06T14:21:05.430Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e4ee189f-8264-402d-878a-6661b7aee60d", "user": "monitor"} 2025-11-06T14:21:05.455Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e4ee189f-8264-402d-878a-6661b7aee60d", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-06T14:21:05.473Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e4ee189f-8264-402d-878a-6661b7aee60d", "user": "monitor"} 2025-11-06T14:21:05.495Z INFO 
Internal secrets updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e4ee189f-8264-402d-878a-6661b7aee60d", "user": "monitor"} 2025-11-06T14:21:05.495Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e4ee189f-8264-402d-878a-6661b7aee60d", "user": "xtrabackup"} 2025-11-06T14:21:05.507Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e4ee189f-8264-402d-878a-6661b7aee60d", "user": "xtrabackup"} 2025-11-06T14:21:05.526Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e4ee189f-8264-402d-878a-6661b7aee60d", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-11-06T14:21:05.553Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e4ee189f-8264-402d-878a-6661b7aee60d", "user": "xtrabackup"} 2025-11-06T14:21:05.553Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e4ee189f-8264-402d-878a-6661b7aee60d", "user": "proxyadmin"} 2025-11-06T14:21:05.573Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e4ee189f-8264-402d-878a-6661b7aee60d", "user": "proxyadmin"} 2025-11-06T14:21:05.592Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e4ee189f-8264-402d-878a-6661b7aee60d", "user": "proxyadmin"} 2025-11-06T14:21:05.592Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e4ee189f-8264-402d-878a-6661b7aee60d", "last-applied-secret": "780a4fdcc482c4496b0d917e331f93aba9b8b00a90aaf5b0a0a9d3df8bfa3019"} 2025-11-06T14:21:05.592Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e4ee189f-8264-402d-878a-6661b7aee60d", "last-applied-secret": "780a4fdcc482c4496b0d917e331f93aba9b8b00a90aaf5b0a0a9d3df8bfa3019"} 2025-11-06T14:21:05.595Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e4ee189f-8264-402d-878a-6661b7aee60d", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-06T14:21:05.685Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e4ee189f-8264-402d-878a-6661b7aee60d", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-06T14:21:07.300Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "e4ee189f-8264-402d-878a-6661b7aee60d", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
2025-11-06T14:21:26.707Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "8bb10646-2fd8-4cd8-a62e-4c81f7c5173d", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-06T14:21:26.753Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "8bb10646-2fd8-4cd8-a62e-4c81f7c5173d", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"}
2025-11-06T14:21:26.829Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "8bb10646-2fd8-4cd8-a62e-4c81f7c5173d", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"}
2025-11-06T14:21:26.858Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "8bb10646-2fd8-4cd8-a62e-4c81f7c5173d", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
2025-11-06T14:21:26.991Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "8bb10646-2fd8-4cd8-a62e-4c81f7c5173d", "object": "some-name-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
2025-11-06T14:23:22.736Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "f8db58c2-cc29-43f8-aad5-30532cac7c2a", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-4487 on 34.118.224.10:53: no such host"}
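The "no such host" error is the operator failing to resolve the pod's headless-service record while some-name-pxc-0 is being recreated; it clears once the pod is back. A throwaway pod can confirm resolution from inside the cluster (the busybox image and the default cluster domain suffix are illustrative assumptions):

  # Sketch: resolve the pod FQDN the operator is trying to reach.
  kubectl -n users-4487 run dns-check --rm -it --restart=Never --image=busybox -- \
    nslookup some-name-pxc-0.some-name-pxc.users-4487.svc.cluster.local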
"namespace": "users-4487", "name": "some-name", "reconcileID": "6dcfd032-28d7-4fba-9a47-5acab63c01fd", "user": "operator"} 2025-11-06T14:24:05.653Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6dcfd032-28d7-4fba-9a47-5acab63c01fd", "secret": "some-name-mysql-init", "user": "operator"} 2025-11-06T14:24:05.673Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6dcfd032-28d7-4fba-9a47-5acab63c01fd", "user": "operator"} 2025-11-06T14:24:05.673Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6dcfd032-28d7-4fba-9a47-5acab63c01fd", "user": "monitor"} 2025-11-06T14:24:05.685Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6dcfd032-28d7-4fba-9a47-5acab63c01fd", "user": "monitor"} 2025-11-06T14:24:05.702Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6dcfd032-28d7-4fba-9a47-5acab63c01fd", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-06T14:24:05.720Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6dcfd032-28d7-4fba-9a47-5acab63c01fd", "user": "monitor"} 2025-11-06T14:24:05.720Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6dcfd032-28d7-4fba-9a47-5acab63c01fd", "user": "xtrabackup"} 2025-11-06T14:24:05.733Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6dcfd032-28d7-4fba-9a47-5acab63c01fd", "user": "xtrabackup"} 2025-11-06T14:24:05.754Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6dcfd032-28d7-4fba-9a47-5acab63c01fd", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-11-06T14:24:05.778Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6dcfd032-28d7-4fba-9a47-5acab63c01fd", "user": "xtrabackup"} 2025-11-06T14:24:05.778Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6dcfd032-28d7-4fba-9a47-5acab63c01fd", "user": "replication"} 2025-11-06T14:24:05.789Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6dcfd032-28d7-4fba-9a47-5acab63c01fd", "user": "replication"} 2025-11-06T14:24:05.808Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6dcfd032-28d7-4fba-9a47-5acab63c01fd", "secret": "some-name-mysql-init", "user": "replication"} 2025-11-06T14:24:05.857Z INFO HAProxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6dcfd032-28d7-4fba-9a47-5acab63c01fd", "last-applied-secret": "1ce0489e43e4ff42c6b360c482e0a0f63ce438cc7b1f7638f45eab08637eb4c4"} 2025-11-06T14:24:05.857Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6dcfd032-28d7-4fba-9a47-5acab63c01fd", "user": "replication"} 
2025-11-06T14:24:05.857Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6dcfd032-28d7-4fba-9a47-5acab63c01fd", "last-applied-secret": "1ce0489e43e4ff42c6b360c482e0a0f63ce438cc7b1f7638f45eab08637eb4c4"}
2025-11-06T14:24:05.859Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6dcfd032-28d7-4fba-9a47-5acab63c01fd", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-06T14:24:05.925Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "6dcfd032-28d7-4fba-9a47-5acab63c01fd", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-06T14:25:36.309Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "f6b80463-9eec-4ba9-94ad-44807be3efd0", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-4487 on 34.118.224.10:53: no such host"}
2025-11-06T14:25:36.697Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "104ad340-f996-43a9-9c5a-bb9a91a9d866", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-4487 on 34.118.224.10:53: no such host"}
2025-11-06T14:25:42.035Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "a9a47944-7d4d-4379-bb8a-8b40a62d06c6", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-4487 on 34.118.224.10:53: no such host"}
2025-11-06T14:26:23.411Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "99a77ea4-7aa5-4c42-a918-8fd58fe6545f", "user": "monitor"}
2025-11-06T14:26:23.424Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "99a77ea4-7aa5-4c42-a918-8fd58fe6545f", "user": "monitor"}
2025-11-06T14:26:23.446Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "99a77ea4-7aa5-4c42-a918-8fd58fe6545f", "secret": "some-name-mysql-init", "user": "monitor"}
2025-11-06T14:26:23.467Z INFO HAProxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "99a77ea4-7aa5-4c42-a918-8fd58fe6545f", "last-applied-secret": "35001851488b2a413f5c691f6f0247633046129d9fd9637907a6c6125e57f0e2"}
2025-11-06T14:26:23.467Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "99a77ea4-7aa5-4c42-a918-8fd58fe6545f", "user": "monitor"}
2025-11-06T14:26:23.470Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-4487", "name": "some-name", "reconcileID": "99a77ea4-7aa5-4c42-a918-8fd58fe6545f", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
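Each "pods will be restarted" entry marks the operator stamping the new last-applied-secret hash into the pod template, which triggers a rolling update of the corresponding StatefulSet. To block on those rollouts instead of polling the log, something along these lines works (a sketch, assuming default kubectl):

  # Sketch: wait for the restarts triggered by the secret rotation to finish.
  kubectl -n users-4487 rollout status statefulset/some-name-pxc --timeout=10m
  kubectl -n users-4487 rollout status statefulset/some-name-haproxy --timeout=10m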
[The remainder of this DEBUG "Updating object" output is a go-cmp diff of the live StatefulSets against the operator-rendered ones. In the captured log the diff lines were re-sorted and interleaved across several objects, so the original structure is not recoverable; the identifiable content is summarized below.]
  - The diffs cover the some-name-pxc, some-name-proxysql and some-name-haproxy StatefulSets; the "-" lines appear to come from the live objects and the "+" lines from the desired ones.
  - The pod-template "last-applied-secret" annotation moves between the rotation hashes logged above (780a4fdc..., 1ce0489e..., 35001851..., 381187c3b..., c3cee91f..., d59bb29d..., 2ed6decd..., 3b399880f578...).
  - The "percona.com/last-config-hash" annotation is replaced with a new base64-encoded JSON copy of the rendered pod template for each component (all values truncated with "..." in the log).
  - Live-only status and API-server-defaulted fields differ as expected: Generation/ObservedGeneration 1-8 vs 0; AvailableReplicas/ReadyReplicas/CurrentReplicas 2-3 vs 0; CurrentRevision names (some-name-haproxy-6b449bbb48, some-name-proxysql-696c78d474, some-name-pxc-69d44544bd, and others) vs ""; CollisionCount &0 vs nil; CreationTimestamp, ResourceVersion and ManagedFields set vs empty; DeprecatedServiceAccount "default", DNSPolicy "ClusterFirst", PodManagementPolicy "OrderedReady", DefaultMode &420, PeriodSeconds 10, Protocol "TCP", Phase "Pending" and PersistentVolumeClaimRetentionPolicy {Retain, Retain} vs their zero values.
  - One pxc diff drops the log-collector sidecars: the "logs" and "logrotate" containers (image perconalab/percona-xtradb-cluster-operator:main-logcollector, Args ["logrotate"]) together with their env ({Name: "IS_LOGCOLLECTOR", Value: "yes"}, {Name: "SERVICE_TYPE", Value: "mysql"}, LOG_DATA_DIR, MONITOR_PASSWORD, POD_NAME, POD_NAMESPASE) and the some-name-log-collector EnvFrom source.
  - Interleaved with the diff output: a Go MySQL client message "[mysql] 2025/11/06 14:23:43 packets.go:58 unexpected EOF" and a resyncPXCUsersWithProxySQL.func1 stack trace through /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:869 and controller-runtime controller.go:296/421/474.
[Log truncated mid-diff at "- ResourceVersion:".]
"1762437910805007006", - ResourceVersion: "1762438094882879006", - ResourceVersion: "1762438250217279006", - ResourceVersion: "1762438291243423006", - ResourceVersion: "1762438418452879006", - ResourceVersion: "1762438457645231006", - ResourceVersion: "1762438505580783006", - ResourceVersion: "1762438556722463006", - ResourceVersion: "1762438623057391006", - ResourceVersion: "1762438721474767006", - ResourceVersion: "1762438773995007006", - ResourceVersion: "1762438880746239006", - ResourceVersion: "1762438949593983005", - ResourceVersion: "1762439044030255006", - ResourceVersion: "1762439121890399005", + RestartPolicy: "", - RestartPolicy: "Always", - RevisionHistoryLimit: &10, + RevisionHistoryLimit: nil, + SchedulerName: "", + SchedulerName: "", - SchedulerName: "default-scheduler", - SchedulerName: "default-scheduler", SecretName: "internal-some-name", SecretName: "some-name-env-vars-haproxy", SecretName: "some-name-mysql-init", SecretName: "some-name-ssl", SecretName: "some-name-ssl-internal", SecretName: "some-name-vault", Secret: &v1.SecretVolumeSource{ SecurityContext: nil, + SecurityContext: nil, - SecurityContext: s"&PodSecurityContext{SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,SupplementalGroups:[],FSGroup:nil,RunAsGroup:nil,Sysctls:[]Sysctl{},WindowsOptions:nil,FSGroupChangePolicy:nil,SeccompProfile:nil,AppArmorProfile:nil,SupplementalGroupsPolicy:nil,SELinux"..., Selector: &{MatchLabels: {"app.kubernetes.io/component": "haproxy", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, SelfLink: "", ServiceAccountName: "default", ServiceName: "some-name-haproxy", ServiceName: "some-name-proxysql-unready", ServiceName: "some-name-pxc", SetHostnameAsFQDN: nil, ShareProcessNamespace: nil, sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1 Spec: v1.PersistentVolumeClaimSpec{ Spec: v1.PodSpec{ Spec: v1.StatefulSetSpec{ StartupProbe: nil, Status: v1.PersistentVolumeClaimStatus{ Status: v1.StatefulSetStatus{ StorageClassName: nil, Subdomain: "", Subdomain: "", - Subresource: "status", SuccessThreshold: 1, Template: v1.PodTemplateSpec{ TerminationGracePeriodSeconds: &30, TerminationGracePeriodSeconds: &600, TerminationGracePeriodSeconds: nil, + TerminationMessagePath: "", - TerminationMessagePath: "/dev/termination-log", + TerminationMessagePolicy: "", - TerminationMessagePolicy: "File", TimeoutSeconds: 5, - Time: s"2025-11-06 14:04:35 +0000 UTC", - Time: s"2025-11-06 14:05:10 +0000 UTC", - Time: s"2025-11-06 14:08:14 +0000 UTC", - Time: s"2025-11-06 14:10:31 +0000 UTC", - Time: s"2025-11-06 14:10:50 +0000 UTC", - Time: s"2025-11-06 14:10:57 +0000 UTC", - Time: s"2025-11-06 14:11:31 +0000 UTC", - Time: s"2025-11-06 14:11:44 +0000 UTC", - Time: s"2025-11-06 14:11:46 +0000 UTC", - Time: 
s"2025-11-06 14:13:38 +0000 UTC", - Time: s"2025-11-06 14:14:17 +0000 UTC", - Time: s"2025-11-06 14:14:25 +0000 UTC", - Time: s"2025-11-06 14:15:05 +0000 UTC", - Time: s"2025-11-06 14:15:33 +0000 UTC", - Time: s"2025-11-06 14:15:56 +0000 UTC", - Time: s"2025-11-06 14:16:23 +0000 UTC", - Time: s"2025-11-06 14:17:03 +0000 UTC", - Time: s"2025-11-06 14:18:41 +0000 UTC", - Time: s"2025-11-06 14:18:50 +0000 UTC", - Time: s"2025-11-06 14:19:33 +0000 UTC", - Time: s"2025-11-06 14:21:05 +0000 UTC", - Time: s"2025-11-06 14:21:20 +0000 UTC", - Time: s"2025-11-06 14:21:26 +0000 UTC", - Time: s"2025-11-06 14:22:29 +0000 UTC", - Time: s"2025-11-06 14:24:04 +0000 UTC", - Time: s"2025-11-06 14:24:05 +0000 UTC", - Time: s"2025-11-06 14:25:21 +0000 UTC", Tolerations: {{Key: "node.alpha.kubernetes.io/unreachable", Operator: "Exists", Effect: "NoExecute", TolerationSeconds: &6000}}, Tolerations: nil, - TopologySpreadConstraints: nil, + TopologySpreadConstraints: []v1.TopologySpreadConstraint{}, TypeMeta: {}, TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"}, + UID: "", - UID: "0054711e-9023-49c7-8694-5630e77fca40", - UID: "a5d0f8b7-f44e-490a-96f4-5ffe71b6688f", - UID: "ab6e7602-a962-40a2-8a08-2d550dd1bf6c", + UpdatedReplicas: 0, - UpdatedReplicas: 1, - UpdatedReplicas: 2, - UpdatedReplicas: 3, + UpdateRevision: "", - UpdateRevision: "some-name-haproxy-6b449bbb48", - UpdateRevision: "some-name-haproxy-84d7fdc9d", - UpdateRevision: "some-name-proxysql-696c78d474", - UpdateRevision: "some-name-proxysql-6c96cbb944", - UpdateRevision: "some-name-proxysql-6fdb9b54d8", - UpdateRevision: "some-name-proxysql-769bbdb49c", - UpdateRevision: "some-name-proxysql-797c8b58b6", - UpdateRevision: "some-name-proxysql-f7cdc9789", - UpdateRevision: "some-name-pxc-588f6fb748", - UpdateRevision: "some-name-pxc-69d44544bd", - UpdateRevision: "some-name-pxc-78b68b7cc", - UpdateRevision: "some-name-pxc-7f675455bf", - UpdateRevision: "some-name-pxc-fd7b9b65f", UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}}, &v1.StatefulSet{ Value: "", + Value: "caching_sha2_password", ValueFrom: nil, ValueFrom: &v1.EnvVarSource{ - Value: "mysql_native_password", VolumeAttributesClassName: nil, VolumeClaimTemplates: nil, VolumeClaimTemplates: []v1.PersistentVolumeClaim{ VolumeDevices: nil, - VolumeMode: &"Filesystem", + VolumeMode: nil, VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...}, - VolumeMounts: []v1.VolumeMount{{Name: "datadir", MountPath: "/var/lib/mysql"}}, VolumeName: "", VolumeSource: v1.VolumeSource{ Volumes: []v1.Volume{ VsphereVolume: nil, WorkingDir: "", + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE + kubectl patch pxc -n users-4487 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.QxuZBywIhT ++ mktemp + local LAST_ERR=/tmp/tmp.FS7Ez4hIbC + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QxuZBywIhT perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-4487 namespace + cat /tmp/tmp.FS7Ez4hIbC + rm /tmp/tmp.QxuZBywIhT /tmp/tmp.FS7Ez4hIbC + return 
+ kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE + kubectl patch pxc -n users-4487 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.QxuZBywIhT ++ mktemp + local LAST_ERR=/tmp/tmp.FS7Ez4hIbC + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QxuZBywIhT perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-4487 namespace + cat /tmp/tmp.FS7Ez4hIbC + rm /tmp/tmp.QxuZBywIhT /tmp/tmp.FS7Ez4hIbC + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.3jUQyhtpJa ++ mktemp + local LAST_ERR=/tmp/tmp.FxKQ5x2UOh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3jUQyhtpJa No resources found + cat /tmp/tmp.FxKQ5x2UOh + rm /tmp/tmp.3jUQyhtpJa /tmp/tmp.FxKQ5x2UOh + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.4LozInpmxf ++ mktemp + local LAST_ERR=/tmp/tmp.D5wzhsRdfY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.4LozInpmxf No resources found + cat /tmp/tmp.D5wzhsRdfY + rm /tmp/tmp.4LozInpmxf /tmp/tmp.D5wzhsRdfY + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.TTCljpUGrX ++ mktemp + local LAST_ERR=/tmp/tmp.kSHp80Yvoy + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TTCljpUGrX validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.kSHp80Yvoy + rm /tmp/tmp.TTCljpUGrX /tmp/tmp.kSHp80Yvoy + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-4487 + rm -rf /tmp/tmp.aKabmIgnk5 + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.l3S3Zc1eXr + desc 'test passed' + set +o xtrace ++ mktemp ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.OkJV990vFs + local LAST_ERR=/tmp/tmp.uSjE3OVZwq + local exit_status=0 ++ mktemp ++ seq 0 2 + for i in '$(seq 0 2)' + local LAST_ERR=/tmp/tmp.HK3wLexz9I + set +e + local exit_status=0 + kubectl delete --grace-period=0 --force=true namespace users-4487 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator
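[editor's note: every kubectl_bin call in this trace expands to the same retry scaffold — mktemp for stdout/stderr capture, up to three attempts via seq 0 2, then cat and rm of the temp files. A minimal sketch of that helper as reconstructed from the trace; the failure branch never fires in this run, so the back-off between attempts is an assumption:

  kubectl_bin() {
      local LAST_OUT LAST_ERR exit_status=0
      LAST_OUT=$(mktemp)
      LAST_ERR=$(mktemp)
      for i in $(seq 0 2); do
          set +e                     # tolerate a failing attempt
          kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
          exit_status=$?
          set -e
          if [ $exit_status != 0 ]; then
              sleep 1                # assumed back-off before the next retry
          else
              break
          fi
      done
      cat "$LAST_OUT"                # replay captured output into the log
      cat "$LAST_ERR" >&2
      rm "$LAST_OUT" "$LAST_ERR"
      return $exit_status
  }

The capture-then-replay design keeps each command's output contiguous in the log even when, as at the end of this run, two kubectl_bin deletions execute concurrently and their xtrace lines interleave.]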