Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/logs/users-8-0.log Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 + create_infra users-28609 + local ns=users-28609 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-26265 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.tpxzzbRH8R ++ mktemp + local LAST_ERR=/tmp/tmp.O0cxqQTi0h + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.tpxzzbRH8R perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-26265 namespace + cat /tmp/tmp.O0cxqQTi0h + rm /tmp/tmp.tpxzzbRH8R /tmp/tmp.O0cxqQTi0h + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.3fvnopdIVf ++ mktemp + local LAST_ERR=/tmp/tmp.Gxaxj2lens + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3fvnopdIVf No resources found + cat /tmp/tmp.Gxaxj2lens + rm /tmp/tmp.3fvnopdIVf /tmp/tmp.Gxaxj2lens + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.rMzytpQ9C5 ++ mktemp + local LAST_ERR=/tmp/tmp.Zw5425LPgE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rMzytpQ9C5 No resources found + cat /tmp/tmp.Zw5425LPgE + rm /tmp/tmp.rMzytpQ9C5 /tmp/tmp.Zw5425LPgE + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were 
provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace + xargs kubectl delete ns ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.8bhrb6f0Jc + local LAST_OUT=/tmp/tmp.E8uFQ7Axfi ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.V1TF2RCAye + local exit_status=0 + local LAST_ERR=/tmp/tmp.LtmQY6KRzq ++ seq 0 2 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.E8uFQ7Axfi + cat /tmp/tmp.LtmQY6KRzq + rm /tmp/tmp.E8uFQ7Axfi /tmp/tmp.LtmQY6KRzq + return 0 namespace "users-26265" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8bhrb6f0Jc namespace "pxc-operator" deleted + cat /tmp/tmp.V1TF2RCAye + rm /tmp/tmp.8bhrb6f0Jc /tmp/tmp.V1TF2RCAye + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.clAgI9Qd0E ++ mktemp + local LAST_ERR=/tmp/tmp.OXYNnlBYkY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.clAgI9Qd0E namespace/pxc-operator created + cat /tmp/tmp.OXYNnlBYkY + rm /tmp/tmp.clAgI9Qd0E /tmp/tmp.OXYNnlBYkY + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.TFAoGaAdDB +++ mktemp ++ local LAST_ERR=/tmp/tmp.ms1itq7FVy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TFAoGaAdDB ++ cat /tmp/tmp.ms1itq7FVy ++ rm /tmp/tmp.TFAoGaAdDB /tmp/tmp.ms1itq7FVy ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2289-047564f9-3-cluster7 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.YOTo3KUGvc ++ mktemp + local LAST_ERR=/tmp/tmp.gsLsc9MmVn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2289-047564f9-3-cluster7 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.YOTo3KUGvc Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2289-047564f9-3-cluster7" modified. 
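Nearly every kubectl call in this log goes through a kubectl_bin wrapper: it captures stdout and stderr into mktemp files (the LAST_OUT/LAST_ERR temp paths above), retries up to three times, then replays both files and cleans up. A minimal sketch of that logic as it appears in the xtrace, assuming the real helper in the repo's e2e-tests library may differ in detail:

# Sketch of the retry wrapper implied by the trace; details are reconstructed,
# not copied from the actual helper source.
kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do                # up to three attempts
        set +e
        kubectl "$@" 1>"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        [ "$exit_status" != 0 ] || break   # success: stop retrying
        sleep 0                            # the trace shows no real backoff
    done
    cat "$LAST_OUT"                        # replay captured stdout...
    cat "$LAST_ERR"                        # ...and stderr
    rm "$LAST_OUT" "$LAST_ERR"
    return $exit_status
}

This is why each command in the log is bracketed by mktemp, cat, and rm lines, and why a failing command is retried up to three times before the helper returns non-zero.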
+ cat /tmp/tmp.gsLsc9MmVn + rm /tmp/tmp.YOTo3KUGvc /tmp/tmp.gsLsc9MmVn + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.cNqHXAR8LE ++ mktemp + local LAST_ERR=/tmp/tmp.xN6O2H0jCo + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.cNqHXAR8LE customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.xN6O2H0jCo + rm /tmp/tmp.cNqHXAR8LE /tmp/tmp.xN6O2H0jCo + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.LCueo4sUa0 ++ mktemp + local LAST_ERR=/tmp/tmp.gzMEHYltCq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LCueo4sUa0 clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.gzMEHYltCq + rm /tmp/tmp.LCueo4sUa0 /tmp/tmp.gzMEHYltCq + return 0 + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2289-047564f9^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/deploy/cw-operator.yaml + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.AKJH9nqmVK ++ mktemp + local LAST_ERR=/tmp/tmp.LArlJB9ZS4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.AKJH9nqmVK deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.LArlJB9ZS4 + rm /tmp/tmp.AKJH9nqmVK /tmp/tmp.LArlJB9ZS4 + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.bZd8plB0tT ++ mktemp + local LAST_ERR=/tmp/tmp.HJLWXBDAFk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.bZd8plB0tT pod/percona-xtradb-cluster-operator-56f8579949-89wjt condition met + cat /tmp/tmp.HJLWXBDAFk + rm /tmp/tmp.bZd8plB0tT /tmp/tmp.HJLWXBDAFk + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.cSmIJCG8ma +++ mktemp ++ local LAST_ERR=/tmp/tmp.ggMmogQXXg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cSmIJCG8ma ++ cat /tmp/tmp.ggMmogQXXg ++ rm /tmp/tmp.cSmIJCG8ma /tmp/tmp.ggMmogQXXg ++ return 0 + wait_pod percona-xtradb-cluster-operator-56f8579949-89wjt 480 pxc-operator + local pod=percona-xtradb-cluster-operator-56f8579949-89wjt + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-56f8579949-89wjt ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-56f8579949-89wjt condition met waiting for pod/percona-xtradb-cluster-operator-56f8579949-89wjt to become Ready.Ok + sleep 3 + create_namespace users-28609 + local namespace=users-28609 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + awk 
'{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces users-28609' ++ mktemp + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-28609 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-28609 ++ mktemp + local LAST_OUT=/tmp/tmp.SzK3MH51qh + grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' ++ mktemp + local LAST_OUT=/tmp/tmp.zFHFKyqeSv + local LAST_ERR=/tmp/tmp.NW3msk478x + local exit_status=0 ++ mktemp ++ seq 0 2 + local LAST_ERR=/tmp/tmp.QwMYckszyT + local exit_status=0 + for i in '$(seq 0 2)' + set +e + kubectl get ns ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-28609 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-28609 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SzK3MH51qh + cat /tmp/tmp.NW3msk478x + rm /tmp/tmp.SzK3MH51qh /tmp/tmp.NW3msk478x + return 0 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-28609 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.zFHFKyqeSv + cat /tmp/tmp.QwMYckszyT Error from server (NotFound): namespaces "users-28609" not found + rm /tmp/tmp.zFHFKyqeSv /tmp/tmp.QwMYckszyT + return 1 + : + wait_for_delete namespace/users-28609 + local res=namespace/users-28609 + echo -n 'waiting for namespace/users-28609 to be deleted' waiting for namespace/users-28609 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "users-28609" not found + desc 'create namespace users-28609' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-28609 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-28609 ++ mktemp + local LAST_OUT=/tmp/tmp.LtEijxuEds ++ mktemp + local LAST_ERR=/tmp/tmp.nqScLszyoU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-28609 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LtEijxuEds namespace/users-28609 created + cat /tmp/tmp.nqScLszyoU + rm /tmp/tmp.LtEijxuEds /tmp/tmp.nqScLszyoU + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.IUdKMeidoN +++ mktemp ++ local LAST_ERR=/tmp/tmp.2tZJ2xVscS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IUdKMeidoN ++ cat /tmp/tmp.2tZJ2xVscS ++ rm /tmp/tmp.IUdKMeidoN /tmp/tmp.2tZJ2xVscS ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2289-047564f9-3-cluster7 --namespace=users-28609 ++ mktemp + local LAST_OUT=/tmp/tmp.T8rJeaQdW1 ++ mktemp + local LAST_ERR=/tmp/tmp.SCQwTk6S7I + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2289-047564f9-3-cluster7 --namespace=users-28609 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.T8rJeaQdW1 Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2289-047564f9-3-cluster7" modified. 
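The repeated 'error: resource(s) were provided, but no name was specified' messages are expected here: destroy_chaos_mesh builds each deletion list with grep/awk, and on a cluster with no chaos-mesh leftovers the list is empty, so kubectl delete is invoked with a kind but no names and fails; the script then discards the error with a no-op (the lone '+ :' after each attempt). The pattern, sketched as a loop for illustration (the real function issues separate pipelines per kind and also greps validate-auth webhooks and chaos-mesh.org CRDs):

# Illustrative shape of the cleanup seen in the trace; the loop is a sketch.
for kind in MutatingWebhookConfiguration ValidatingWebhookConfiguration crd clusterrolebinding clusterrole; do
    names=$(kubectl get "$kind" | grep chaos-mesh | awk '{print $1}')
    timeout 30 kubectl delete "$kind" $names || :   # empty $names => benign error
done

The same swallow-the-failure idiom shows up just above for the namespace deletion: kubectl delete namespace users-28609 is retried until it returns NotFound, the wrapper returns 1, and the caller ignores it with ':'.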
+ cat /tmp/tmp.SCQwTk6S7I + rm /tmp/tmp.T8rJeaQdW1 /tmp/tmp.SCQwTk6S7I + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.FURZ6FBr03 ++ mktemp + local LAST_ERR=/tmp/tmp.DPJ9UkqOwv + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.FURZ6FBr03 secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.DPJ9UkqOwv + rm /tmp/tmp.FURZ6FBr03 /tmp/tmp.DPJ9UkqOwv + return 0 + desc 'create PXC cluster with 1-password secret' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster with 1-password secret ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/conf/some-name.yml '' '' /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/conf/secrets_one_pass.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/conf/secrets_one_pass.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/conf/secrets_one_pass.yml ++ mktemp + local LAST_OUT=/tmp/tmp.tRnyUc7JJI ++ mktemp + local LAST_ERR=/tmp/tmp.NZ2x8XRS0e + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/conf/secrets_one_pass.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.tRnyUc7JJI secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.NZ2x8XRS0e + rm /tmp/tmp.tRnyUc7JJI /tmp/tmp.NZ2x8XRS0e + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/conf/client.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/conf/client.yml + local pvc_name= + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/conf/client.yml '' + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/conf/client.yml + local pvc_name= + kubectl_bin apply -f - + cat 
/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + local LAST_OUT=/tmp/tmp.vX6XgxXtur + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2289-047564f9#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' ++ mktemp + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + local LAST_ERR=/tmp/tmp.4lZn7IgKTM + local exit_status=0 + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-28609~ ++ seq 0 2 + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.vX6XgxXtur deployment.apps/pxc-client created + cat /tmp/tmp.4lZn7IgKTM + rm /tmp/tmp.vX6XgxXtur /tmp/tmp.4lZn7IgKTM + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/conf/some-name.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/conf/some-name.yml + local pvc_name= + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/conf/some-name.yml '' + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/conf/some-name.yml + local pvc_name= + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2289-047564f9#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + local LAST_OUT=/tmp/tmp.prGVFHTftZ + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-28609~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' ++ mktemp + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + local LAST_ERR=/tmp/tmp.lakysT4L49 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.prGVFHTftZ 
perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.lakysT4L49 + rm /tmp/tmp.prGVFHTftZ /tmp/tmp.lakysT4L49 + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.pkNHLYxcIh ++++ mktemp +++ local LAST_ERR=/tmp/tmp.ULQot8xXaY +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.pkNHLYxcIh +++ cat /tmp/tmp.ULQot8xXaY +++ rm /tmp/tmp.pkNHLYxcIh /tmp/tmp.ULQot8xXaY +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.j35wRtwkdI ++++ mktemp +++ local LAST_ERR=/tmp/tmp.m1hKHlRRNa +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.j35wRtwkdI +++ cat /tmp/tmp.m1hKHlRRNa +++ rm /tmp/tmp.j35wRtwkdI /tmp/tmp.m1hKHlRRNa +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-28609 ++ mktemp + local LAST_OUT=/tmp/tmp.HUqroAeGcb ++ mktemp + local LAST_ERR=/tmp/tmp.kuMnklI5Zc + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-28609 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-28609 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-28609 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.HUqroAeGcb + cat /tmp/tmp.kuMnklI5Zc error: no matching resources found + rm /tmp/tmp.HUqroAeGcb /tmp/tmp.kuMnklI5Zc + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for 
pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.ncRKruCrrn +++ mktemp ++ local LAST_ERR=/tmp/tmp.beUbiyH7Iy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ncRKruCrrn ++ cat /tmp/tmp.beUbiyH7Iy ++ rm /tmp/tmp.ncRKruCrrn /tmp/tmp.beUbiyH7Iy ++ return 0 + local 'root_pass==)N!LTQ,jOu.Hc5lC>?' 
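The root password fed to the client below is read straight from the Kubernetes secret: getSecretData, per its expansion above, is just a templated secret lookup plus base64 decoding. A self-contained equivalent (modulo the kubectl_bin retry wrapper):

# Equivalent of the getSecretData expansion shown in the trace.
getSecretData() {
    local secretName=$1 dataKey=$2
    kubectl get "secrets/$secretName" --template="{{.data.$dataKey}}" | base64 --decode
}

root_pass=$(getSecretData my-cluster-secrets root)

Later in the test the same helper is run against both my-cluster-secrets and the operator-managed internal-some-name secret, so each user's password can be compared across the two.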
+ desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''=)N!LTQ,jOu.Hc5lC>?'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -p'\''=)N!LTQ,jOu.Hc5lC>?'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pYocAKbKkl +++ mktemp ++ local LAST_ERR=/tmp/tmp.K675Fa5BzD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pYocAKbKkl ++ cat /tmp/tmp.K675Fa5BzD ++ rm /tmp/tmp.pYocAKbKkl /tmp/tmp.K675Fa5BzD ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''=)N!LTQ,jOu.Hc5lC>?'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -p'\''=)N!LTQ,jOu.Hc5lC>?'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ad0Miz5QOs +++ mktemp ++ local LAST_ERR=/tmp/tmp.9wD7DcWszv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ad0Miz5QOs ++ cat /tmp/tmp.9wD7DcWszv ++ rm /tmp/tmp.Ad0Miz5QOs /tmp/tmp.9wD7DcWszv ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''=)N!LTQ,jOu.Hc5lC>?'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''=)N!LTQ,jOu.Hc5lC>?'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''=)N!LTQ,jOu.Hc5lC>?'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''=)N!LTQ,jOu.Hc5lC>?'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6x9DEMo8Nb +++ mktemp ++ local LAST_ERR=/tmp/tmp.WXoJTyrBcw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6x9DEMo8Nb ++ cat /tmp/tmp.WXoJTyrBcw ++ rm /tmp/tmp.6x9DEMo8Nb /tmp/tmp.WXoJTyrBcw ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.eAHkEcbyFX/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.eAHkEcbyFX/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-1.sql /tmp/tmp.eAHkEcbyFX/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''=)N!LTQ,jOu.Hc5lC>?'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''=)N!LTQ,jOu.Hc5lC>?'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''=)N!LTQ,jOu.Hc5lC>?'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''=)N!LTQ,jOu.Hc5lC>?'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KatY4N23c1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.N8O4YzIcJU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KatY4N23c1 ++ cat /tmp/tmp.N8O4YzIcJU ++ rm /tmp/tmp.KatY4N23c1 /tmp/tmp.N8O4YzIcJU ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ 
/usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.eAHkEcbyFX/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.eAHkEcbyFX/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-1.sql /tmp/tmp.eAHkEcbyFX/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''=)N!LTQ,jOu.Hc5lC>?'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''=)N!LTQ,jOu.Hc5lC>?'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''=)N!LTQ,jOu.Hc5lC>?'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''=)N!LTQ,jOu.Hc5lC>?'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1N8PXF5kKA +++ mktemp ++ local LAST_ERR=/tmp/tmp.InmxwVl22Y ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1N8PXF5kKA ++ cat /tmp/tmp.InmxwVl22Y ++ rm /tmp/tmp.1N8PXF5kKA /tmp/tmp.InmxwVl22Y ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.eAHkEcbyFX/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.eAHkEcbyFX/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-1.sql /tmp/tmp.eAHkEcbyFX/select-1.sql + is_keyring_plugin_in_use some-name + local cluster=some-name + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' + grep -E -o 'early-plugin-load=keyring_\w+.so' ++ mktemp + local LAST_OUT=/tmp/tmp.9IVDoPejoc ++ mktemp + local LAST_ERR=/tmp/tmp.ukC0FBHQiG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.9IVDoPejoc + cat /tmp/tmp.ukC0FBHQiG Unable to use a TTY - input is not a terminal or the right kind of file + rm /tmp/tmp.9IVDoPejoc /tmp/tmp.ukC0FBHQiG + return 0 + return 1 + desc 'test missing passwords were created and present in internal secrets' + set +o xtrace ----------------------------------------------------------------------------------- test missing passwords were created and present in internal secrets ----------------------------------------------------------------------------------- + empty_pwds=() + wrong_pwds=() + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking root' Checking root ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.khs89weh4O +++ mktemp ++ local LAST_ERR=/tmp/tmp.mjZkHZLUXT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.khs89weh4O ++ cat /tmp/tmp.mjZkHZLUXT ++ rm /tmp/tmp.khs89weh4O /tmp/tmp.mjZkHZLUXT ++ return 0 + secret_pass='=)N!LTQ,jOu.Hc5lC>?' ++ getSecretData internal-some-name root ++ local secretName=internal-some-name ++ local dataKey=root ++ kubectl_bin get secrets/internal-some-name '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.Dti0M4QkBR +++ mktemp ++ local LAST_ERR=/tmp/tmp.pYprFh7phr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Dti0M4QkBR ++ cat /tmp/tmp.pYprFh7phr ++ rm /tmp/tmp.Dti0M4QkBR /tmp/tmp.pYprFh7phr ++ return 0 + int_secret_pass='=)N!LTQ,jOu.Hc5lC>?' + [[ -z =)N!LTQ,jOu.Hc5lC>? ]] + [[ =)N!LTQ,jOu.Hc5lC>? != \=\)\N\!\L\T\Q\,\j\O\u\.\H\c\5\l\C\>\? 
]] + [[ root != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ root ]] + [[ '' =~ root ]] + echo 'Running compare for root' Running compare for root + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''=)N!LTQ,jOu.Hc5lC>?'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''=)N!LTQ,jOu.Hc5lC>?'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''=)N!LTQ,jOu.Hc5lC>?'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''=)N!LTQ,jOu.Hc5lC>?'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.n3gD9kIsJo +++ mktemp ++ local LAST_ERR=/tmp/tmp.sv8qp9jGbc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.n3gD9kIsJo ++ cat /tmp/tmp.sv8qp9jGbc ++ rm /tmp/tmp.n3gD9kIsJo /tmp/tmp.sv8qp9jGbc ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.eAHkEcbyFX/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.eAHkEcbyFX/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.eAHkEcbyFX/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking xtrabackup' Checking xtrabackup ++ getSecretData my-cluster-secrets xtrabackup ++ local secretName=my-cluster-secrets ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.ggKCBFwm3l +++ mktemp ++ local LAST_ERR=/tmp/tmp.GeD3eLar40 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ggKCBFwm3l ++ cat /tmp/tmp.GeD3eLar40 ++ rm /tmp/tmp.ggKCBFwm3l /tmp/tmp.GeD3eLar40 ++ return 0 + secret_pass='?Fz@2C=kNs#DcWm_XS' ++ getSecretData internal-some-name xtrabackup ++ local secretName=internal-some-name ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.uibz269yrP +++ mktemp ++ local LAST_ERR=/tmp/tmp.hZI2VDSKq4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uibz269yrP ++ cat /tmp/tmp.hZI2VDSKq4 ++ rm /tmp/tmp.uibz269yrP /tmp/tmp.hZI2VDSKq4 ++ return 0 + int_secret_pass='?Fz@2C=kNs#DcWm_XS' + [[ -z ?Fz@2C=kNs#DcWm_XS ]] + [[ ?Fz@2C=kNs#DcWm_XS != \?\F\z\@\2\C\=\k\N\s\#\D\c\W\m\_\X\S ]] + [[ xtrabackup != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ xtrabackup ]] + [[ '' =~ xtrabackup ]] + echo 'Running compare for xtrabackup' Running compare for xtrabackup + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''?Fz@2C=kNs#DcWm_XS'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''?Fz@2C=kNs#DcWm_XS'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''?Fz@2C=kNs#DcWm_XS'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''?Fz@2C=kNs#DcWm_XS'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.l7WKJWvLEw +++ mktemp ++ local LAST_ERR=/tmp/tmp.rrJIT8uhSw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.l7WKJWvLEw ++ cat /tmp/tmp.rrJIT8uhSw ++ rm /tmp/tmp.l7WKJWvLEw /tmp/tmp.rrJIT8uhSw ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + 
local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.eAHkEcbyFX/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.eAHkEcbyFX/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.eAHkEcbyFX/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking monitor' Checking monitor ++ getSecretData my-cluster-secrets monitor ++ local secretName=my-cluster-secrets ++ local dataKey=monitor ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.S99NonechL +++ mktemp ++ local LAST_ERR=/tmp/tmp.RXh2QSmjYl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.S99NonechL ++ cat /tmp/tmp.RXh2QSmjYl ++ rm /tmp/tmp.S99NonechL /tmp/tmp.RXh2QSmjYl ++ return 0 + secret_pass=monitor_password ++ getSecretData internal-some-name monitor ++ local secretName=internal-some-name ++ local dataKey=monitor ++ kubectl_bin get secrets/internal-some-name '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.Fd5vzP69fN +++ mktemp ++ local LAST_ERR=/tmp/tmp.rdYJoIVZtV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Fd5vzP69fN ++ cat /tmp/tmp.rdYJoIVZtV ++ rm /tmp/tmp.Fd5vzP69fN /tmp/tmp.rdYJoIVZtV ++ return 0 + int_secret_pass=monitor_password + [[ -z monitor_password ]] + [[ monitor_password != \m\o\n\i\t\o\r\_\p\a\s\s\w\o\r\d ]] + [[ monitor != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ monitor ]] + [[ '' =~ monitor ]] + echo 'Running compare for monitor' Running compare for monitor + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cpINzru0cu +++ mktemp ++ local LAST_ERR=/tmp/tmp.etsMHTfvra ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 
'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cpINzru0cu ++ cat /tmp/tmp.etsMHTfvra ++ rm /tmp/tmp.cpINzru0cu /tmp/tmp.etsMHTfvra ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.eAHkEcbyFX/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.eAHkEcbyFX/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.eAHkEcbyFX/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking proxyadmin' Checking proxyadmin ++ getSecretData my-cluster-secrets proxyadmin ++ local secretName=my-cluster-secrets ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.yFVCRudbxe +++ mktemp ++ local LAST_ERR=/tmp/tmp.gFTRV2KXJK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yFVCRudbxe ++ cat /tmp/tmp.gFTRV2KXJK ++ rm /tmp/tmp.yFVCRudbxe /tmp/tmp.gFTRV2KXJK ++ return 0 + secret_pass='inL7N1B?cdl~R{u#oeE' ++ getSecretData internal-some-name proxyadmin ++ local secretName=internal-some-name ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.xqqgB8rxbB +++ mktemp ++ local LAST_ERR=/tmp/tmp.R9lRK6XU0x ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xqqgB8rxbB ++ cat /tmp/tmp.R9lRK6XU0x ++ rm /tmp/tmp.xqqgB8rxbB /tmp/tmp.R9lRK6XU0x ++ return 0 + int_secret_pass='inL7N1B?cdl~R{u#oeE' + [[ -z inL7N1B?cdl~R{u#oeE ]] + [[ inL7N1B?cdl~R{u#oeE != \i\n\L\7\N\1\B\?\c\d\l\~\R\{\u\#\o\e\E ]] + [[ proxyadmin != \p\r\o\x\y\a\d\m\i\n ]] + [[ proxyadmin == \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ proxyadmin ]] + [[ '' =~ proxyadmin ]] + echo 'Running compare for proxyadmin' Running compare for proxyadmin + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''inL7N1B?cdl~R{u#oeE'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''inL7N1B?cdl~R{u#oeE'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''inL7N1B?cdl~R{u#oeE'\''' 
some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''inL7N1B?cdl~R{u#oeE'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.eAHkEcbyFX/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-2.sql /tmp/tmp.eAHkEcbyFX/select-2.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking operator' Checking operator ++ getSecretData my-cluster-secrets operator ++ local secretName=my-cluster-secrets ++ local dataKey=operator ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.TH8QERNNOJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.2Oke0lHCKq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TH8QERNNOJ ++ cat /tmp/tmp.2Oke0lHCKq ++ rm /tmp/tmp.TH8QERNNOJ /tmp/tmp.2Oke0lHCKq ++ return 0 + secret_pass='eP1$-+b7xLqdt!)YRg' ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.Wnz1UTEfWk +++ mktemp ++ local LAST_ERR=/tmp/tmp.L6jOwxIm4q ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Wnz1UTEfWk ++ cat /tmp/tmp.L6jOwxIm4q ++ rm /tmp/tmp.Wnz1UTEfWk /tmp/tmp.L6jOwxIm4q ++ return 0 + int_secret_pass='eP1$-+b7xLqdt!)YRg' + [[ -z eP1$-+b7xLqdt!)YRg ]] + [[ eP1$-+b7xLqdt!)YRg != \e\P\1\$\-\+\b\7\x\L\q\d\t\!\)\Y\R\g ]] + [[ operator != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ operator ]] + [[ '' =~ operator ]] + echo 'Running compare for operator' Running compare for operator + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''eP1$-+b7xLqdt!)YRg'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''eP1$-+b7xLqdt!)YRg'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''eP1$-+b7xLqdt!)YRg'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''eP1$-+b7xLqdt!)YRg'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0I28bNada5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.1AQtJYLeR1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0I28bNada5 ++ cat /tmp/tmp.1AQtJYLeR1 ++ rm /tmp/tmp.0I28bNada5 /tmp/tmp.1AQtJYLeR1 ++ return 0 + 
client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.eAHkEcbyFX/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.eAHkEcbyFX/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.eAHkEcbyFX/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking replication' Checking replication ++ getSecretData my-cluster-secrets replication ++ local secretName=my-cluster-secrets ++ local dataKey=replication ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.N8HFY7ycFl +++ mktemp ++ local LAST_ERR=/tmp/tmp.axGZnL9FWW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.N8HFY7ycFl ++ cat /tmp/tmp.axGZnL9FWW ++ rm /tmp/tmp.N8HFY7ycFl /tmp/tmp.axGZnL9FWW ++ return 0 + secret_pass='q*eiQxS5Y}#aZ][1' ++ getSecretData internal-some-name replication ++ local secretName=internal-some-name ++ local dataKey=replication ++ kubectl_bin get secrets/internal-some-name '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.eOeGHY2mHp +++ mktemp ++ local LAST_ERR=/tmp/tmp.tEavFu6bSb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eOeGHY2mHp ++ cat /tmp/tmp.tEavFu6bSb ++ rm /tmp/tmp.eOeGHY2mHp /tmp/tmp.tEavFu6bSb ++ return 0 + int_secret_pass='q*eiQxS5Y}#aZ][1' + [[ -z q*eiQxS5Y}#aZ][1 ]] + [[ q*eiQxS5Y}#aZ][1 != \q\*\e\i\Q\x\S\5\Y\}\#\a\Z\]\[\1 ]] + [[ replication != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ replication ]] + [[ '' =~ replication ]] + echo 'Running compare for replication' Running compare for replication + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''q*eiQxS5Y}#aZ][1'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''q*eiQxS5Y}#aZ][1'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''q*eiQxS5Y}#aZ][1'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''q*eiQxS5Y}#aZ][1'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7qmT8VNK0v 
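Every kubectl call in this log goes through the kubectl_bin wrapper, visible as the recurring mktemp / LAST_OUT / LAST_ERR / seq 0 2 pattern: up to three attempts with stdout and stderr buffered to temp files and replayed afterwards. A condensed reconstruction (the back-off between failed attempts is assumed; the log only ever shows the first attempt succeeding):

    # Reconstructed retry wrapper: up to three attempts, output buffered.
    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"${LAST_OUT}" 2>"${LAST_ERR}"
            exit_status=$?
            set -e
            [ "${exit_status}" != 0 ] || break   # success: stop retrying
            sleep 1                              # assumed back-off, not visible in the log
        done
        cat "${LAST_OUT}"
        cat "${LAST_ERR}" >&2
        rm "${LAST_OUT}" "${LAST_ERR}"
        return "${exit_status}"
    }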
+++ mktemp ++ local LAST_ERR=/tmp/tmp.YVAyUkjf5Q ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7qmT8VNK0v ++ cat /tmp/tmp.YVAyUkjf5Q ++ rm /tmp/tmp.7qmT8VNK0v /tmp/tmp.YVAyUkjf5Q ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.eAHkEcbyFX/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.eAHkEcbyFX/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.eAHkEcbyFX/select-4.sql + [[ -n '' ]] + [[ -n '' ]] + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.6WjgwPzppG ++ mktemp + local LAST_ERR=/tmp/tmp.CjTD7FJFEm + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6WjgwPzppG secret/my-cluster-secrets patched + cat /tmp/tmp.CjTD7FJFEm + rm /tmp/tmp.6WjgwPzppG /tmp/tmp.CjTD7FJFEm + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.v1flyFB76e +++ mktemp ++ local LAST_ERR=/tmp/tmp.e8YBYjK0CN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.v1flyFB76e ++ cat /tmp/tmp.e8YBYjK0CN ++ rm /tmp/tmp.v1flyFB76e /tmp/tmp.e8YBYjK0CN ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod 
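The "test root" step rotates the root credential by patching the Secret with a base64-encoded value (dGVzdC1wYXNzd29yZA== decodes to test-password), sleeping 15 seconds, and re-running the same SHOW TABLES comparison with the new password. The patch_secret helper as it appears in the trace:

    # Patch one key of a Secret with an already base64-encoded value.
    patch_secret() {
        local secret=$1 key=$2 value=$3
        kubectl patch secret "${secret}" -p="{\"data\":{\"${key}\": \"${value}\"}}"
    }

    patch_secret my-cluster-secrets root "$(printf 'test-password' | base64)"
    sleep 15   # give the operator time to reconcile, as the test does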
pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.eAHkEcbyFX/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.eAHkEcbyFX/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.eAHkEcbyFX/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.dyNUWNNHsS ++ mktemp + local LAST_ERR=/tmp/tmp.VATE2QkmT0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dyNUWNNHsS perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.VATE2QkmT0 + rm /tmp/tmp.dyNUWNNHsS /tmp/tmp.VATE2QkmT0 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MPHLkuZzH6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.b7OYHPtF3B ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MPHLkuZzH6 ++ cat /tmp/tmp.b7OYHPtF3B ++ rm /tmp/tmp.MPHLkuZzH6 /tmp/tmp.b7OYHPtF3B ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TvAkdbxzpO +++ mktemp ++ local LAST_ERR=/tmp/tmp.7KwPTS4Ck1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TvAkdbxzpO ++ cat /tmp/tmp.7KwPTS4Ck1 ++ rm /tmp/tmp.TvAkdbxzpO /tmp/tmp.7KwPTS4Ck1 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.l4GQUYmKYS ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.enFlqVmIzV +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.l4GQUYmKYS +++++ cat /tmp/tmp.enFlqVmIzV 
+++++ rm /tmp/tmp.l4GQUYmKYS /tmp/tmp.enFlqVmIzV +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.qqtDACM0J0 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.svgyVeVFDN +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.qqtDACM0J0 +++++ cat /tmp/tmp.svgyVeVFDN +++++ rm /tmp/tmp.qqtDACM0J0 /tmp/tmp.svgyVeVFDN +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FbF1UNgR4L +++ mktemp ++ local LAST_ERR=/tmp/tmp.FZyy4ZBsib ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FbF1UNgR4L ++ cat /tmp/tmp.FZyy4ZBsib ++ rm /tmp/tmp.FbF1UNgR4L /tmp/tmp.FZyy4ZBsib ++ return 0 + [[ 3 == \3 ]] + echo + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.8d7B0xl3YH ++ mktemp + local LAST_ERR=/tmp/tmp.qOaHobwpgN + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8d7B0xl3YH secret/my-cluster-secrets patched + cat /tmp/tmp.qOaHobwpgN + rm /tmp/tmp.8d7B0xl3YH /tmp/tmp.qOaHobwpgN + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.y3H355KnbG +++ mktemp ++ local LAST_ERR=/tmp/tmp.JPB6uTPcxE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.y3H355KnbG ++ cat /tmp/tmp.JPB6uTPcxE ++ rm /tmp/tmp.y3H355KnbG /tmp/tmp.JPB6uTPcxE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.368RuJwK84 +++ mktemp ++ local LAST_ERR=/tmp/tmp.sEP2uGyJRe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.368RuJwK84 ++ cat /tmp/tmp.sEP2uGyJRe ++ rm /tmp/tmp.368RuJwK84 /tmp/tmp.sEP2uGyJRe ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
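Scaling ProxySQL to three replicas is followed by wait_cluster_consistency, whose polling loop plays out above: re-read .status.state every five seconds for at most 300 iterations, then cross-check the ready counts against the requested sizes. A compact sketch of that loop (the 7s head start, 5s interval, and 300 cap are all taken from the trace):

    wait_cluster_consistency() {
        local cluster=$1 cluster_size=$2 proxy_size=$3
        local i=0 max=300
        sleep 7
        until [[ $(kubectl get pxc "${cluster}" -o 'jsonpath={.status.state}') == "ready" ]]; do
            echo -n .
            sleep 5
            [[ $((i++)) -ge ${max} ]] && return 1   # give up after ~25 minutes
        done
        # The CR reports ready; confirm both tiers reached the requested size.
        [[ $(kubectl get pxc "${cluster}" -o 'jsonpath={.status.pxc.ready}') == "${cluster_size}" ]] || return 1
        [[ $(kubectl get pxc "${cluster}" -o 'jsonpath={.status.proxysql.ready}') == "${proxy_size}" ]] || return 1
    }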
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.atL2ucKOoM +++ mktemp ++ local LAST_ERR=/tmp/tmp.rZ26WxPCpf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.atL2ucKOoM ++ cat /tmp/tmp.rZ26WxPCpf ++ rm /tmp/tmp.atL2ucKOoM /tmp/tmp.rZ26WxPCpf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.chX5Vlxm4C +++ mktemp ++ local LAST_ERR=/tmp/tmp.BJch6cRdWr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.chX5Vlxm4C ++ cat /tmp/tmp.BJch6cRdWr ++ rm /tmp/tmp.chX5Vlxm4C /tmp/tmp.BJch6cRdWr ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.88im0mbe4d +++ mktemp ++ local LAST_ERR=/tmp/tmp.LTA0TaVBn9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.88im0mbe4d ++ cat /tmp/tmp.LTA0TaVBn9 ++ rm /tmp/tmp.88im0mbe4d /tmp/tmp.LTA0TaVBn9 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Qw0n94Kwzr ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.PIE3P2p1jx +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Qw0n94Kwzr +++++ cat /tmp/tmp.PIE3P2p1jx +++++ rm /tmp/tmp.Qw0n94Kwzr /tmp/tmp.PIE3P2p1jx +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.NbaNt3Y8sU ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.34eNW3EoEg +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.NbaNt3Y8sU +++++ cat /tmp/tmp.34eNW3EoEg +++++ rm /tmp/tmp.NbaNt3Y8sU /tmp/tmp.34eNW3EoEg +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SpVDTHUBNH +++ mktemp ++ local LAST_ERR=/tmp/tmp.xT5eRm0NCb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SpVDTHUBNH ++ cat /tmp/tmp.xT5eRm0NCb ++ rm /tmp/tmp.SpVDTHUBNH /tmp/tmp.xT5eRm0NCb ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local 
command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.eAHkEcbyFX/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-2.sql /tmp/tmp.eAHkEcbyFX/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.eAHkEcbyFX/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-2.sql /tmp/tmp.eAHkEcbyFX/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
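After rotating proxyadmin, the test logs into the ProxySQL admin interface on each replica individually rather than through the service: it execs mysql inside the proxysql container against 127.0.0.1:6032. The three back-to-back compare_mysql_cmd_local calls above amount to the loop below; run_mysql_local is not expanded in the log, so the exec line is an approximation, and expected_result names the select-2.sql file chosen in the trace:

    # Approximation of run_mysql_local: run a statement in a named container.
    run_mysql_local() {
        local command=$1 uri=$2 pod=$3 container_name=$4
        echo "${command}" | kubectl exec -i "${pod}" -c "${container_name}" -- \
            sh -c "mysql -sN ${uri}"
    }

    for pod in some-name-proxysql-0 some-name-proxysql-1 some-name-proxysql-2; do
        run_mysql_local 'SHOW TABLES;' \
            "-h127.0.0.1 -P6032 -uproxyadmin -p'test-password'" \
            "${pod}" proxysql > /tmp/select-2.sql
        diff -u "${expected_result}" /tmp/select-2.sql
    done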
-s /tmp/tmp.eAHkEcbyFX/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-2.sql /tmp/tmp.eAHkEcbyFX/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Ivmi312cwr ++ mktemp + local LAST_ERR=/tmp/tmp.rly2ufsrCn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Ivmi312cwr perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.rly2ufsrCn + rm /tmp/tmp.Ivmi312cwr /tmp/tmp.rly2ufsrCn + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.e8VNczhIIA ++ mktemp + local LAST_ERR=/tmp/tmp.Yo0kHSZ1cv + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.e8VNczhIIA secret/my-cluster-secrets patched + cat /tmp/tmp.Yo0kHSZ1cv + rm /tmp/tmp.e8VNczhIIA /tmp/tmp.Yo0kHSZ1cv + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gzBAP2XVr9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.C6laXFnufu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gzBAP2XVr9 ++ cat /tmp/tmp.C6laXFnufu ++ rm /tmp/tmp.gzBAP2XVr9 /tmp/tmp.C6laXFnufu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LpyxquMAX8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.DEgqAjJcjv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LpyxquMAX8 ++ cat /tmp/tmp.DEgqAjJcjv ++ rm /tmp/tmp.LpyxquMAX8 /tmp/tmp.DEgqAjJcjv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Fuaepmn5nW +++ mktemp ++ local LAST_ERR=/tmp/tmp.sZjUk834rH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Fuaepmn5nW ++ cat /tmp/tmp.sZjUk834rH ++ rm /tmp/tmp.Fuaepmn5nW /tmp/tmp.sZjUk834rH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZLao9pZgAk +++ mktemp ++ local LAST_ERR=/tmp/tmp.im6InQo2EB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZLao9pZgAk ++ cat /tmp/tmp.im6InQo2EB ++ rm /tmp/tmp.ZLao9pZgAk /tmp/tmp.im6InQo2EB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vFGehrPCUW +++ mktemp ++ local LAST_ERR=/tmp/tmp.gxzswRqh4k ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vFGehrPCUW ++ cat /tmp/tmp.gxzswRqh4k ++ rm /tmp/tmp.vFGehrPCUW /tmp/tmp.gxzswRqh4k ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.I8xZSoPUze +++ mktemp ++ local LAST_ERR=/tmp/tmp.qaEKDjjNdz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.I8xZSoPUze ++ cat /tmp/tmp.qaEKDjjNdz ++ rm /tmp/tmp.I8xZSoPUze /tmp/tmp.qaEKDjjNdz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OxlXaKW07Z +++ mktemp ++ local LAST_ERR=/tmp/tmp.GFEoJbFR69 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OxlXaKW07Z ++ cat /tmp/tmp.GFEoJbFR69 ++ rm /tmp/tmp.OxlXaKW07Z /tmp/tmp.GFEoJbFR69 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vF5JlBXRFA +++ mktemp ++ local LAST_ERR=/tmp/tmp.3G4AEN9CSq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vF5JlBXRFA ++ cat /tmp/tmp.3G4AEN9CSq ++ rm /tmp/tmp.vF5JlBXRFA /tmp/tmp.3G4AEN9CSq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cZCETHITKg +++ mktemp ++ local LAST_ERR=/tmp/tmp.wBD7OVQj3G ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cZCETHITKg ++ cat /tmp/tmp.wBD7OVQj3G ++ rm /tmp/tmp.cZCETHITKg /tmp/tmp.wBD7OVQj3G ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jYBntSw6GK +++ mktemp ++ local LAST_ERR=/tmp/tmp.5tB6q3JFFW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jYBntSw6GK ++ cat /tmp/tmp.5tB6q3JFFW ++ rm /tmp/tmp.jYBntSw6GK /tmp/tmp.5tB6q3JFFW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zeZ8x1dVHn +++ mktemp ++ local LAST_ERR=/tmp/tmp.dDdupaQP7k ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zeZ8x1dVHn ++ cat /tmp/tmp.dDdupaQP7k ++ rm /tmp/tmp.zeZ8x1dVHn /tmp/tmp.dDdupaQP7k ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lPmEDOiyuN +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZmKh2bGHip ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lPmEDOiyuN ++ cat /tmp/tmp.ZmKh2bGHip ++ rm /tmp/tmp.lPmEDOiyuN /tmp/tmp.ZmKh2bGHip ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TK1oIraXLS +++ mktemp ++ local LAST_ERR=/tmp/tmp.tYWZpLTL7Z ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TK1oIraXLS ++ cat /tmp/tmp.tYWZpLTL7Z ++ rm /tmp/tmp.TK1oIraXLS /tmp/tmp.tYWZpLTL7Z ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.j7DEis27y4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.x6829VJoIf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.j7DEis27y4 ++ cat /tmp/tmp.x6829VJoIf ++ rm /tmp/tmp.j7DEis27y4 /tmp/tmp.x6829VJoIf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6lHcsW5C46 +++ mktemp ++ local LAST_ERR=/tmp/tmp.RO5jLktvnZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6lHcsW5C46 ++ cat /tmp/tmp.RO5jLktvnZ ++ rm /tmp/tmp.6lHcsW5C46 /tmp/tmp.RO5jLktvnZ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lmFDuSyjPD +++ mktemp ++ local LAST_ERR=/tmp/tmp.6B90n0zMqT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lmFDuSyjPD ++ cat /tmp/tmp.6B90n0zMqT ++ rm /tmp/tmp.lmFDuSyjPD /tmp/tmp.6B90n0zMqT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EtF8DvNKtp +++ mktemp ++ local LAST_ERR=/tmp/tmp.W8ZfTuWaOW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EtF8DvNKtp ++ cat /tmp/tmp.W8ZfTuWaOW ++ rm /tmp/tmp.EtF8DvNKtp /tmp/tmp.W8ZfTuWaOW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.l5VjFDOax1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.M27M7N0lKO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.l5VjFDOax1 ++ cat /tmp/tmp.M27M7N0lKO ++ rm /tmp/tmp.l5VjFDOax1 /tmp/tmp.M27M7N0lKO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dEWSHyCNOM +++ mktemp ++ local LAST_ERR=/tmp/tmp.10rxMvXfUy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dEWSHyCNOM ++ cat /tmp/tmp.10rxMvXfUy ++ rm /tmp/tmp.dEWSHyCNOM /tmp/tmp.10rxMvXfUy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0OAesm2BwK +++ mktemp ++ local LAST_ERR=/tmp/tmp.NsSFqortV3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0OAesm2BwK ++ cat /tmp/tmp.NsSFqortV3 ++ rm /tmp/tmp.0OAesm2BwK /tmp/tmp.NsSFqortV3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KCCMD44xVE +++ mktemp ++ local LAST_ERR=/tmp/tmp.Dc28kTmxix ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KCCMD44xVE ++ cat /tmp/tmp.Dc28kTmxix ++ rm /tmp/tmp.KCCMD44xVE /tmp/tmp.Dc28kTmxix ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CpX5UWmtld +++ mktemp ++ local LAST_ERR=/tmp/tmp.QXYoEDKq5l ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CpX5UWmtld ++ cat /tmp/tmp.QXYoEDKq5l ++ rm /tmp/tmp.CpX5UWmtld /tmp/tmp.QXYoEDKq5l ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.c3XSjgUA1z +++ mktemp ++ local LAST_ERR=/tmp/tmp.jFrLFTjvHR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.c3XSjgUA1z ++ cat /tmp/tmp.jFrLFTjvHR ++ rm /tmp/tmp.c3XSjgUA1z /tmp/tmp.jFrLFTjvHR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 22 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3Cg5VPVcQP +++ mktemp ++ local LAST_ERR=/tmp/tmp.LzmPxfKrUM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3Cg5VPVcQP ++ cat /tmp/tmp.LzmPxfKrUM ++ rm /tmp/tmp.3Cg5VPVcQP /tmp/tmp.LzmPxfKrUM ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mYnpffL3Gr +++ mktemp ++ local LAST_ERR=/tmp/tmp.i737avQhAu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mYnpffL3Gr ++ cat /tmp/tmp.i737avQhAu ++ rm /tmp/tmp.mYnpffL3Gr /tmp/tmp.i737avQhAu ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.4J6liaxngp ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.mfAJSNgCCd +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.4J6liaxngp +++++ cat /tmp/tmp.mfAJSNgCCd +++++ rm /tmp/tmp.4J6liaxngp /tmp/tmp.mfAJSNgCCd +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.qVATDXb4cy ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.j4dvldJvJo +++++ local exit_status=0 ++++++ seq 0 2 
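Once the state flips back to ready, the get_proxy_engine / get_proxy helper chain above decides which ready-count to verify by probing the CR spec: .spec.haproxy.enabled first, then .spec.proxysql.enabled. A sketch of that decision, mirroring the jsonpath probes in the trace:

    # Decide which proxy the cluster runs, via the same jsonpath probes as the trace.
    get_proxy_engine() {
        local cluster=$1
        if [[ $(kubectl get pxc "${cluster}" -o 'jsonpath={.spec.haproxy.enabled}') == "true" ]]; then
            echo haproxy
        elif [[ $(kubectl get pxc "${cluster}" -o 'jsonpath={.spec.proxysql.enabled}') == "true" ]]; then
            echo proxysql
        fi
    }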
+++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.qVATDXb4cy +++++ cat /tmp/tmp.j4dvldJvJo +++++ rm /tmp/tmp.qVATDXb4cy /tmp/tmp.j4dvldJvJo +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2yg3L1jLYJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.go1ErfE948 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2yg3L1jLYJ ++ cat /tmp/tmp.go1ErfE948 ++ rm /tmp/tmp.2yg3L1jLYJ /tmp/tmp.go1ErfE948 ++ return 0 + [[ 2 == \2 ]] + echo + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-3-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' 
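The compare helpers pick their expected-result file from the PXC image tag, preferring a version-suffixed file when it exists; in the trace above the 8.0 image selects select-3-80.sql. A sketch of that fallback (tests_dir and the -84 suffix are assumptions; the log only shows the 8.0 branch taken):

    # Prefer a version-specific expected file, falling back to the generic one.
    IMAGE_PXC=perconalab/percona-xtradb-cluster-operator:main-pxc8.0
    expected_result=${tests_dir}/compare/select-3.sql          # tests_dir is assumed
    if [[ ${IMAGE_PXC} =~ 8\.0 ]] && [ -f "${tests_dir}/compare/select-3-80.sql" ]; then
        expected_result=${tests_dir}/compare/select-3-80.sql
    elif [[ ${IMAGE_PXC} =~ 8\.4 ]] && [ -f "${tests_dir}/compare/select-3-84.sql" ]; then
        expected_result=${tests_dir}/compare/select-3-84.sql   # -84 name is an assumption
    fi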
-s /tmp/tmp.eAHkEcbyFX/select-3.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-3.sql /tmp/tmp.eAHkEcbyFX/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.JHcdomkewQ ++ mktemp + local LAST_ERR=/tmp/tmp.banIO8cpor + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.JHcdomkewQ secret/my-cluster-secrets patched + cat /tmp/tmp.banIO8cpor + rm /tmp/tmp.JHcdomkewQ /tmp/tmp.banIO8cpor + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.mIqWw0b3uj +++ mktemp ++ local LAST_ERR=/tmp/tmp.sP3XZ4fBfT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mIqWw0b3uj ++ cat /tmp/tmp.sP3XZ4fBfT ++ rm /tmp/tmp.mIqWw0b3uj /tmp/tmp.sP3XZ4fBfT ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + retry=0 + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep additional_password ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kQaRIyhOll +++ mktemp ++ local LAST_ERR=/tmp/tmp.9jeL5poMr9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kQaRIyhOll ++ cat /tmp/tmp.9jeL5poMr9 ++ rm /tmp/tmp.kQaRIyhOll /tmp/tmp.9jeL5poMr9 ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace {"additional_password": "$A$005$u\\u000bJ\\u0003\\u0002u`\\u0019'/\\nEnj\\u000b>z\\u0003 
\\u0007jHoJsQYiZkU8jQWMwrC8.dVHpcuq6tlXfk6/kOk9G8A"} + retry=0 + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1J1JcM46iU +++ mktemp ++ local LAST_ERR=/tmp/tmp.dxt4B6VksZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1J1JcM46iU ++ cat /tmp/tmp.dxt4B6VksZ ++ rm /tmp/tmp.1J1JcM46iU /tmp/tmp.dxt4B6VksZ ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 1 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RdDIJLzw4B +++ mktemp ++ local LAST_ERR=/tmp/tmp.QBkKBopIW6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RdDIJLzw4B ++ cat /tmp/tmp.QBkKBopIW6 ++ rm /tmp/tmp.RdDIJLzw4B /tmp/tmp.QBkKBopIW6 ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 2 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot 
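The monitor rotation leans on MySQL 8.0 dual passwords: immediately after the change, mysql.user.User_attributes carries an additional_password entry (the escaped hash printed above), and once the operator discards the retained password the column reverts to NULL. The two probes the test alternates between, using run_mysql as sketched earlier:

    # additional_password present: the new password is set, the old one retained.
    is_password_updated() {
        run_mysql "SELECT User_attributes FROM mysql.user WHERE user='$1'" "$2" \
            | grep additional_password
    }

    # User_attributes back to NULL: the retained old password was discarded.
    is_old_password_discarded() {
        run_mysql "SELECT User_attributes FROM mysql.user WHERE user='$1'" "$2" \
            | grep NULL
    }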
-p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RzV5C6gibd +++ mktemp ++ local LAST_ERR=/tmp/tmp.XRLYsZP0hO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RzV5C6gibd ++ cat /tmp/tmp.XRLYsZP0hO ++ rm /tmp/tmp.RzV5C6gibd /tmp/tmp.XRLYsZP0hO ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 3 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cULES09aQv +++ mktemp ++ local LAST_ERR=/tmp/tmp.YtIF4XvSpA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cULES09aQv ++ cat /tmp/tmp.YtIF4XvSpA ++ rm /tmp/tmp.cULES09aQv /tmp/tmp.YtIF4XvSpA ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 4 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bjNqgww9YC +++ mktemp ++ local 
LAST_ERR=/tmp/tmp.2rRIIVJjGJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bjNqgww9YC ++ cat /tmp/tmp.2rRIIVJjGJ ++ rm /tmp/tmp.bjNqgww9YC /tmp/tmp.2rRIIVJjGJ ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 5 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.m5Vo5NNjxf +++ mktemp ++ local LAST_ERR=/tmp/tmp.OMV7AZgP26 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.m5Vo5NNjxf ++ cat /tmp/tmp.OMV7AZgP26 ++ rm /tmp/tmp.m5Vo5NNjxf /tmp/tmp.OMV7AZgP26 ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 6 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lD487SMy1x +++ mktemp ++ local LAST_ERR=/tmp/tmp.tqw062vIEa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lD487SMy1x ++ cat /tmp/tmp.tqw062vIEa ++ rm /tmp/tmp.lD487SMy1x /tmp/tmp.tqw062vIEa ++ return 0 + 
client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 7 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WLqkPbIpjz +++ mktemp ++ local LAST_ERR=/tmp/tmp.VsNObJO3Tn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WLqkPbIpjz ++ cat /tmp/tmp.VsNObJO3Tn ++ rm /tmp/tmp.WLqkPbIpjz /tmp/tmp.VsNObJO3Tn ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 8 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iX85qbDhXV +++ mktemp ++ local LAST_ERR=/tmp/tmp.fxgsqtFDAd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iX85qbDhXV ++ cat /tmp/tmp.fxgsqtFDAd ++ rm /tmp/tmp.iX85qbDhXV /tmp/tmp.fxgsqtFDAd ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met 
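The rest of the log is this loop spinning: once per second, up to 240 tries, it re-runs is_old_password_discarded until the operator finishes flushing the retained monitor password. Condensed, the loop around the probe looks like this (root_pass is test-password at this point in the test):

    retry=0
    until is_old_password_discarded monitor "-h some-name-pxc -uroot -p'${root_pass}'"; do
        echo 'waiting for password propagation'
        sleep 1
        retry=$((retry + 1))
        [[ ${retry} -ge 240 ]] && exit 1   # the trace caps retries at 240
    done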
waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 9 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZYxZftoNRA +++ mktemp ++ local LAST_ERR=/tmp/tmp.acMqXmzecb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZYxZftoNRA ++ cat /tmp/tmp.acMqXmzecb ++ rm /tmp/tmp.ZYxZftoNRA /tmp/tmp.acMqXmzecb ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 10 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nCuyqMR4T9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.fNbGRoEwU2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nCuyqMR4T9 ++ cat /tmp/tmp.fNbGRoEwU2 ++ rm /tmp/tmp.nCuyqMR4T9 /tmp/tmp.fNbGRoEwU2 ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 11 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local 
username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1LPqMtBX8N +++ mktemp ++ local LAST_ERR=/tmp/tmp.lxvmiUAkqC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1LPqMtBX8N ++ cat /tmp/tmp.lxvmiUAkqC ++ rm /tmp/tmp.1LPqMtBX8N /tmp/tmp.lxvmiUAkqC ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 12 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YZ9iRdkEt6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.1R8GRugKTU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YZ9iRdkEt6 ++ cat /tmp/tmp.1R8GRugKTU ++ rm /tmp/tmp.YZ9iRdkEt6 /tmp/tmp.1R8GRugKTU ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 13 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot 
-p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QnpXcFCOPc +++ mktemp ++ local LAST_ERR=/tmp/tmp.NpiVC1PGCD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QnpXcFCOPc ++ cat /tmp/tmp.NpiVC1PGCD ++ rm /tmp/tmp.QnpXcFCOPc /tmp/tmp.NpiVC1PGCD ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace NULL + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GAf7nYY3j6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.fLoMwMmigU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GAf7nYY3j6 ++ cat /tmp/tmp.fLoMwMmigU ++ rm /tmp/tmp.GAf7nYY3j6 /tmp/tmp.fLoMwMmigU ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JQSdsaJAXw +++ mktemp ++ local LAST_ERR=/tmp/tmp.rNnBNdTEuK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JQSdsaJAXw ++ cat /tmp/tmp.rNnBNdTEuK ++ rm /tmp/tmp.JQSdsaJAXw /tmp/tmp.rNnBNdTEuK ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.6UU7YC6pYI ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.B8XjUwhxqF +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.6UU7YC6pYI +++++ cat /tmp/tmp.B8XjUwhxqF +++++ rm /tmp/tmp.6UU7YC6pYI /tmp/tmp.B8XjUwhxqF +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.kHIZNGF8Bl ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.dd7tQPBwmS +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ 
exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.kHIZNGF8Bl +++++ cat /tmp/tmp.dd7tQPBwmS +++++ rm /tmp/tmp.kHIZNGF8Bl /tmp/tmp.dd7tQPBwmS +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.g8mmvgHvj2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.cpGw3FtrqX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.g8mmvgHvj2 ++ cat /tmp/tmp.cpGw3FtrqX ++ rm /tmp/tmp.g8mmvgHvj2 /tmp/tmp.cpGw3FtrqX ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5qxzYhKAgP +++ mktemp ++ local LAST_ERR=/tmp/tmp.u0BF3pQNJ8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5qxzYhKAgP ++ cat /tmp/tmp.u0BF3pQNJ8 ++ rm /tmp/tmp.5qxzYhKAgP /tmp/tmp.u0BF3pQNJ8 ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.eAHkEcbyFX/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.eAHkEcbyFX/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.eAHkEcbyFX/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.o7HdUBlNzR ++ mktemp + local LAST_ERR=/tmp/tmp.zAq4WF5iax + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.o7HdUBlNzR secret/my-cluster-secrets patched + cat /tmp/tmp.zAq4WF5iax + rm /tmp/tmp.o7HdUBlNzR /tmp/tmp.zAq4WF5iax + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3afuSiJcAg +++ mktemp ++ local LAST_ERR=/tmp/tmp.Mzy0xtxM13 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3afuSiJcAg ++ cat /tmp/tmp.Mzy0xtxM13 ++ rm /tmp/tmp.3afuSiJcAg /tmp/tmp.Mzy0xtxM13 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eVysUXv9QY +++ mktemp ++ local LAST_ERR=/tmp/tmp.c7v0KrM2Og ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eVysUXv9QY ++ cat /tmp/tmp.c7v0KrM2Og ++ rm /tmp/tmp.eVysUXv9QY /tmp/tmp.c7v0KrM2Og ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.xZ5ThgeBJ5 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Jv2PR78DSX +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.xZ5ThgeBJ5 +++++ cat /tmp/tmp.Jv2PR78DSX +++++ rm /tmp/tmp.xZ5ThgeBJ5 /tmp/tmp.Jv2PR78DSX +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.gkX8vUuFud ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.40tkhOnEDy +++++ local 
exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.gkX8vUuFud +++++ cat /tmp/tmp.40tkhOnEDy +++++ rm /tmp/tmp.gkX8vUuFud /tmp/tmp.40tkhOnEDy +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uvRx4ZeVak +++ mktemp ++ local LAST_ERR=/tmp/tmp.DTIgLZoEhX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uvRx4ZeVak ++ cat /tmp/tmp.DTIgLZoEhX ++ rm /tmp/tmp.uvRx4ZeVak /tmp/tmp.DTIgLZoEhX ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0B1ly81QCN +++ mktemp ++ local LAST_ERR=/tmp/tmp.zSiusMFhUw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0B1ly81QCN ++ cat /tmp/tmp.zSiusMFhUw ++ rm /tmp/tmp.0B1ly81QCN /tmp/tmp.zSiusMFhUw ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.eAHkEcbyFX/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.eAHkEcbyFX/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.eAHkEcbyFX/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.97B4msmg18 ++ mktemp + local LAST_ERR=/tmp/tmp.YkgUpJCqZn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.97B4msmg18 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.YkgUpJCqZn + rm /tmp/tmp.97B4msmg18 /tmp/tmp.YkgUpJCqZn + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4OUW1uMk39 +++ mktemp ++ local LAST_ERR=/tmp/tmp.kDRAugCSQL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4OUW1uMk39 ++ cat /tmp/tmp.kDRAugCSQL ++ rm /tmp/tmp.4OUW1uMk39 /tmp/tmp.kDRAugCSQL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8OJ1pzE53f +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZgipbvxBPQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8OJ1pzE53f ++ cat /tmp/tmp.ZgipbvxBPQ ++ rm /tmp/tmp.8OJ1pzE53f /tmp/tmp.ZgipbvxBPQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PXFMMQ7dmv +++ mktemp ++ local LAST_ERR=/tmp/tmp.OfPU77MMn2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PXFMMQ7dmv ++ cat /tmp/tmp.OfPU77MMn2 ++ rm /tmp/tmp.PXFMMQ7dmv /tmp/tmp.OfPU77MMn2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
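
For context: the "change secret name" step above does not rotate a single key, it repoints the whole cluster at a different credentials Secret, and the operator then reconciles the system users against the new Secret's data, which is why the cluster drops into the long initializing phase traced below. A minimal sketch of the same operation, using the names from this log:

  # Swap the cluster's credentials source; the operator picks up the change
  # and re-syncs user passwords from my-cluster-secrets-2.
  kubectl patch pxc some-name --type merge \
    --patch '{"spec":{"secretsName":"my-cluster-secrets-2"}}'
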
.+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8Wq8NCcTSP +++ mktemp ++ local LAST_ERR=/tmp/tmp.VcPFOlZWfh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8Wq8NCcTSP ++ cat /tmp/tmp.VcPFOlZWfh ++ rm /tmp/tmp.8Wq8NCcTSP /tmp/tmp.VcPFOlZWfh ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n .
[the same 5-second poll of .status.state repeats for i=3 through 18; the cluster stays initializing, and only the loop counter and the /tmp/tmp.* scratch files differ]
+ echo -n . 
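
Both waits in this log share one shape: the password-propagation probe earlier allows up to 240 one-second tries (the [[ N -ge 240 ]] guard) for grep NULL to succeed against mysql.user's User_attributes, i.e., as the helper's name says, for the old monitor credential retained by MySQL's dual-password rotation to be discarded, while this readiness wait allows up to 300 five-second polls of .status.state. A minimal re-creation of the readiness variant, assuming only bash and kubectl; the suite's real wait_cluster_consistency additionally compares .status.pxc.ready and the proxy's ready count once the state flips, as the trace shows later:

  # Sketch of the bounded readiness poll seen above (not the suite's exact code).
  wait_ready() {
    local cluster=$1 i=0 max=300
    until [[ $(kubectl get pxc "$cluster" -o 'jsonpath={.status.state}') == ready ]]; do
      echo -n .
      sleep 5
      (( ++i >= max )) && return 1   # gives up after ~25 minutes
    done
  }
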
.+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.swAYgp5I1j +++ mktemp ++ local LAST_ERR=/tmp/tmp.9u9BGnse3j ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.swAYgp5I1j ++ cat /tmp/tmp.9u9BGnse3j ++ rm /tmp/tmp.swAYgp5I1j /tmp/tmp.9u9BGnse3j ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Pj8cJ7uFrf +++ mktemp ++ local LAST_ERR=/tmp/tmp.de7n4YgQ6J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Pj8cJ7uFrf ++ cat /tmp/tmp.de7n4YgQ6J ++ rm /tmp/tmp.Pj8cJ7uFrf /tmp/tmp.de7n4YgQ6J ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.kSsbpImSci ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.NpFJFqAEUE +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.kSsbpImSci +++++ cat /tmp/tmp.NpFJFqAEUE +++++ rm /tmp/tmp.kSsbpImSci /tmp/tmp.NpFJFqAEUE +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.cWTycMgMDv ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.edry9dDdmW +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.cWTycMgMDv +++++ cat /tmp/tmp.edry9dDdmW +++++ rm /tmp/tmp.cWTycMgMDv /tmp/tmp.edry9dDdmW +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8JkCU1EpZd +++ mktemp ++ local LAST_ERR=/tmp/tmp.gQfu37B9L2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8JkCU1EpZd ++ cat /tmp/tmp.gQfu37B9L2 ++ rm /tmp/tmp.8JkCU1EpZd /tmp/tmp.gQfu37B9L2 ++ return 0 + [[ 2 == \2 ]] + echo + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.al7j4j5wA8 ++ mktemp + local LAST_ERR=/tmp/tmp.YJLSe4WMRG + local exit_status=0 ++ 
seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.al7j4j5wA8 secret/my-cluster-secrets-2 patched + cat /tmp/tmp.YJLSe4WMRG + rm /tmp/tmp.al7j4j5wA8 /tmp/tmp.YJLSe4WMRG + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.S0fa9j4PiX +++ mktemp ++ local LAST_ERR=/tmp/tmp.hlmF3O2mA3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.S0fa9j4PiX ++ cat /tmp/tmp.hlmF3O2mA3 ++ rm /tmp/tmp.S0fa9j4PiX /tmp/tmp.hlmF3O2mA3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.95zmc7EPta +++ mktemp ++ local LAST_ERR=/tmp/tmp.9gj7Lm9Nhk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.95zmc7EPta ++ cat /tmp/tmp.9gj7Lm9Nhk ++ rm /tmp/tmp.95zmc7EPta /tmp/tmp.9gj7Lm9Nhk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.83JMCZdhhQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.U7KsCqN1fz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.83JMCZdhhQ ++ cat /tmp/tmp.U7KsCqN1fz ++ rm /tmp/tmp.83JMCZdhhQ /tmp/tmp.U7KsCqN1fz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wQxHgKJtFm +++ mktemp ++ local LAST_ERR=/tmp/tmp.dzA3plfD2h ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wQxHgKJtFm ++ cat /tmp/tmp.dzA3plfD2h ++ rm /tmp/tmp.wQxHgKJtFm /tmp/tmp.dzA3plfD2h ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
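
The rotation exercised in this "test new operator" block is nothing more than a strategic-merge patch of one base64-encoded key in the Secret. A sketch of the two steps the trace shows; note the echo -n, since without it the trailing newline would be encoded into the password:

  newpass=test-password2
  encoded=$(echo -n "$newpass" | base64)   # dGVzdC1wYXNzd29yZDI=, as in the log
  kubectl patch secret my-cluster-secrets-2 \
    -p "{\"data\":{\"operator\": \"$encoded\"}}"
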
.+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.js3HbomRSa +++ mktemp ++ local LAST_ERR=/tmp/tmp.4DLjld73x4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.js3HbomRSa ++ cat /tmp/tmp.4DLjld73x4 ++ rm /tmp/tmp.js3HbomRSa /tmp/tmp.4DLjld73x4 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gsIOOhuqjd +++ mktemp ++ local LAST_ERR=/tmp/tmp.PeOXA8rLMS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gsIOOhuqjd ++ cat /tmp/tmp.PeOXA8rLMS ++ rm /tmp/tmp.gsIOOhuqjd /tmp/tmp.PeOXA8rLMS ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.wksTzKxcgy ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.aRUE7HYaZ2 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.wksTzKxcgy +++++ cat /tmp/tmp.aRUE7HYaZ2 +++++ rm /tmp/tmp.wksTzKxcgy /tmp/tmp.aRUE7HYaZ2 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.dWXgqk20cx ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.VGixK8RX1P +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.dWXgqk20cx +++++ cat /tmp/tmp.VGixK8RX1P +++++ rm /tmp/tmp.dWXgqk20cx /tmp/tmp.VGixK8RX1P +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QFqq9SIxFc +++ mktemp ++ local LAST_ERR=/tmp/tmp.Hqar2InqVe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QFqq9SIxFc ++ cat /tmp/tmp.Hqar2InqVe ++ rm /tmp/tmp.QFqq9SIxFc /tmp/tmp.Hqar2InqVe ++ return 0 + [[ 2 == \2 ]] + echo + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql ]] + 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.j0icyVCIO9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.WuAVnWzGrP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.j0icyVCIO9 ++ cat /tmp/tmp.WuAVnWzGrP ++ rm /tmp/tmp.j0icyVCIO9 /tmp/tmp.WuAVnWzGrP ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.eAHkEcbyFX/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.eAHkEcbyFX/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.eAHkEcbyFX/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.FQnuhoyd1h +++ mktemp ++ local LAST_ERR=/tmp/tmp.P2JVA6RdkK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FQnuhoyd1h ++ cat /tmp/tmp.P2JVA6RdkK ++ rm /tmp/tmp.FQnuhoyd1h /tmp/tmp.P2JVA6RdkK ++ return 0 + newpass='?]i[xuDUDjOQ~y4-y{D' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''?]i[xuDUDjOQ~y4-y{D'\'';' '-h some-name-pxc -uroot -p'\''?]i[xuDUDjOQ~y4-y{D'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''?]i[xuDUDjOQ~y4-y{D'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''?]i[xuDUDjOQ~y4-y{D'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HrH78jv1gC +++ mktemp ++ local LAST_ERR=/tmp/tmp.veBkpvwvmY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HrH78jv1gC ++ cat /tmp/tmp.veBkpvwvmY ++ rm /tmp/tmp.HrH78jv1gC /tmp/tmp.veBkpvwvmY ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 
's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''?]i[xuDUDjOQ~y4-y{D'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''?]i[xuDUDjOQ~y4-y{D'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''?]i[xuDUDjOQ~y4-y{D'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''?]i[xuDUDjOQ~y4-y{D'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Pyo1pY80wG +++ mktemp ++ local LAST_ERR=/tmp/tmp.LzCtY6QClF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Pyo1pY80wG ++ cat /tmp/tmp.LzCtY6QClF ++ rm /tmp/tmp.Pyo1pY80wG /tmp/tmp.LzCtY6QClF ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.eAHkEcbyFX/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.eAHkEcbyFX/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.eAHkEcbyFX/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.xZJpN82DyO +++ mktemp ++ local LAST_ERR=/tmp/tmp.pIv1VWhHHO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xZJpN82DyO ++ cat /tmp/tmp.pIv1VWhHHO ++ rm /tmp/tmp.xZJpN82DyO /tmp/tmp.pIv1VWhHHO ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.WQ6rntLBuq ++ mktemp + local LAST_ERR=/tmp/tmp.hBSUBBfCHx + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.WQ6rntLBuq secret/my-cluster-secrets-2 configured + cat /tmp/tmp.hBSUBBfCHx Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
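
getSecretData, used just above to read the operator password back out of internal-some-name, is the usual go-template-plus-decode pattern. A sketch consistent with what the trace shows:

  # Read and decode one key of a Secret (sketch of the helper's visible behavior).
  getSecretData() {
    local secret=$1 key=$2
    kubectl get "secrets/$secret" --template="{{.data.$key}}" | base64 --decode
  }
  pass=$(getSecretData internal-some-name operator)   # yields test-password2 here
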
+ rm /tmp/tmp.WQ6rntLBuq /tmp/tmp.hBSUBBfCHx + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vg4BgdRy7x +++ mktemp ++ local LAST_ERR=/tmp/tmp.JOA1EtnYYC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vg4BgdRy7x ++ cat /tmp/tmp.JOA1EtnYYC ++ rm /tmp/tmp.vg4BgdRy7x /tmp/tmp.JOA1EtnYYC ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.eAHkEcbyFX/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.eAHkEcbyFX/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.eAHkEcbyFX/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/conf/some-name.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/conf/some-name.yml + local pvc_name= + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/conf/some-name.yml '' + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/conf/some-name.yml + local pvc_name= ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/conf/some-name.yml + local LAST_OUT=/tmp/tmp.zn3qAE3fSO ++ mktemp + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-28609~ + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2289-047564f9#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' + local LAST_ERR=/tmp/tmp.8SCEkBIlf9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zn3qAE3fSO perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.8SCEkBIlf9 + rm /tmp/tmp.zn3qAE3fSO /tmp/tmp.8SCEkBIlf9 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vH8QeLkzBO +++ mktemp ++ local LAST_ERR=/tmp/tmp.BXTlUAg33X ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vH8QeLkzBO ++ cat /tmp/tmp.BXTlUAg33X ++ rm /tmp/tmp.vH8QeLkzBO /tmp/tmp.BXTlUAg33X ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
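
apply_config never edits the manifest on disk: cat_config streams it through a chain of sed substitutions that pin the apiVersion and swap every image reference for the build under test, and the result is piped straight into kubectl. A trimmed sketch of that pipeline, reusing two substitutions verbatim from the trace ($config_file as named in the log):

  cat "$config_file" \
    | sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
    | sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' \
    | kubectl apply -f -
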
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.92C2Pl8ssI +++ mktemp ++ local LAST_ERR=/tmp/tmp.3GoFDMxbR5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.92C2Pl8ssI ++ cat /tmp/tmp.3GoFDMxbR5 ++ rm /tmp/tmp.92C2Pl8ssI /tmp/tmp.3GoFDMxbR5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n .
[the same 5-second poll of .status.state repeats for i=1 through 11; the cluster stays initializing, and only the loop counter and the /tmp/tmp.* scratch files differ]
+ echo -n . 
.+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.d2RL4QMpD7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.QDM3dcd46L ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.d2RL4QMpD7 ++ cat /tmp/tmp.QDM3dcd46L ++ rm /tmp/tmp.d2RL4QMpD7 /tmp/tmp.QDM3dcd46L ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ReFBgugoI9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.8RPIC2kN6R ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ReFBgugoI9 ++ cat /tmp/tmp.8RPIC2kN6R ++ rm /tmp/tmp.ReFBgugoI9 /tmp/tmp.8RPIC2kN6R ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MN2OvNIvuU +++ mktemp ++ local LAST_ERR=/tmp/tmp.yXC4SxrXKF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MN2OvNIvuU ++ cat /tmp/tmp.yXC4SxrXKF ++ rm /tmp/tmp.MN2OvNIvuU /tmp/tmp.yXC4SxrXKF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HzUFLyS6Cf +++ mktemp ++ local LAST_ERR=/tmp/tmp.jSMOp5ewY8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HzUFLyS6Cf ++ cat /tmp/tmp.jSMOp5ewY8 ++ rm /tmp/tmp.HzUFLyS6Cf /tmp/tmp.jSMOp5ewY8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wf50Rcxkdn +++ mktemp ++ local LAST_ERR=/tmp/tmp.WEzrpTQvNf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wf50Rcxkdn ++ cat /tmp/tmp.WEzrpTQvNf ++ rm /tmp/tmp.wf50Rcxkdn /tmp/tmp.WEzrpTQvNf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.32i5R3TjaA +++ mktemp ++ local LAST_ERR=/tmp/tmp.hD3HFSoGHN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.32i5R3TjaA ++ cat /tmp/tmp.hD3HFSoGHN ++ rm /tmp/tmp.32i5R3TjaA /tmp/tmp.hD3HFSoGHN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6Sk29Zow1S +++ mktemp ++ local LAST_ERR=/tmp/tmp.dOjuaVEWkZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6Sk29Zow1S ++ cat /tmp/tmp.dOjuaVEWkZ ++ rm /tmp/tmp.6Sk29Zow1S /tmp/tmp.dOjuaVEWkZ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QlhkV8ZgGD +++ mktemp ++ local LAST_ERR=/tmp/tmp.eDLukYeZ4B ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QlhkV8ZgGD ++ cat /tmp/tmp.eDLukYeZ4B ++ rm /tmp/tmp.QlhkV8ZgGD /tmp/tmp.eDLukYeZ4B ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xPj7NJXaNs +++ mktemp ++ local LAST_ERR=/tmp/tmp.sxgyKIHZGo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xPj7NJXaNs ++ cat /tmp/tmp.sxgyKIHZGo ++ rm /tmp/tmp.xPj7NJXaNs /tmp/tmp.sxgyKIHZGo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rRxEIwwfZc +++ mktemp ++ local LAST_ERR=/tmp/tmp.QTyAcckrTt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rRxEIwwfZc ++ cat /tmp/tmp.QTyAcckrTt ++ rm /tmp/tmp.rRxEIwwfZc /tmp/tmp.QTyAcckrTt ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 22 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FSpyVJagog +++ mktemp ++ local LAST_ERR=/tmp/tmp.tUjN2Z5Sto ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FSpyVJagog ++ cat /tmp/tmp.tUjN2Z5Sto ++ rm /tmp/tmp.FSpyVJagog /tmp/tmp.tUjN2Z5Sto ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 23 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hiFI1epYCB +++ mktemp ++ local LAST_ERR=/tmp/tmp.ruKIS8ytWK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hiFI1epYCB ++ cat /tmp/tmp.ruKIS8ytWK ++ rm /tmp/tmp.hiFI1epYCB /tmp/tmp.ruKIS8ytWK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 24 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yANx5xpxuf +++ mktemp ++ local LAST_ERR=/tmp/tmp.YEGSfGzIvm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yANx5xpxuf ++ cat /tmp/tmp.YEGSfGzIvm ++ rm /tmp/tmp.yANx5xpxuf /tmp/tmp.YEGSfGzIvm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 25 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.alJkYDadVb +++ mktemp ++ local LAST_ERR=/tmp/tmp.TfzJNllhri ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.alJkYDadVb ++ cat /tmp/tmp.TfzJNllhri ++ rm /tmp/tmp.alJkYDadVb /tmp/tmp.TfzJNllhri ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 26 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pSDrkFBD99 +++ mktemp ++ local LAST_ERR=/tmp/tmp.l6fBTLVKDN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pSDrkFBD99 ++ cat /tmp/tmp.l6fBTLVKDN ++ rm /tmp/tmp.pSDrkFBD99 /tmp/tmp.l6fBTLVKDN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 27 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OvadwLcBQZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.6UPL2PuFDB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OvadwLcBQZ ++ cat /tmp/tmp.6UPL2PuFDB ++ rm /tmp/tmp.OvadwLcBQZ /tmp/tmp.6UPL2PuFDB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 28 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uGuipVR9Gu +++ mktemp ++ local LAST_ERR=/tmp/tmp.AYMd5YDurF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uGuipVR9Gu ++ cat /tmp/tmp.AYMd5YDurF ++ rm /tmp/tmp.uGuipVR9Gu /tmp/tmp.AYMd5YDurF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 29 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rNhPff6pX4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.9YbHs0A1km ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rNhPff6pX4 ++ cat /tmp/tmp.9YbHs0A1km ++ rm /tmp/tmp.rNhPff6pX4 /tmp/tmp.9YbHs0A1km ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 30 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OSo5duXrKs +++ mktemp ++ local LAST_ERR=/tmp/tmp.TsQJzq0aif ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OSo5duXrKs ++ cat /tmp/tmp.TsQJzq0aif ++ rm /tmp/tmp.OSo5duXrKs /tmp/tmp.TsQJzq0aif ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 31 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QQfcHugN0z +++ mktemp ++ local LAST_ERR=/tmp/tmp.abbrXKDNuE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QQfcHugN0z ++ cat /tmp/tmp.abbrXKDNuE ++ rm /tmp/tmp.QQfcHugN0z /tmp/tmp.abbrXKDNuE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 32 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J5RyY5Agjs +++ mktemp ++ local LAST_ERR=/tmp/tmp.4xDyRfptkT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.J5RyY5Agjs ++ cat /tmp/tmp.4xDyRfptkT ++ rm /tmp/tmp.J5RyY5Agjs /tmp/tmp.4xDyRfptkT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 33 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gRZZifcFSw +++ mktemp ++ local LAST_ERR=/tmp/tmp.UmdzNj8AtK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gRZZifcFSw ++ cat /tmp/tmp.UmdzNj8AtK ++ rm /tmp/tmp.gRZZifcFSw /tmp/tmp.UmdzNj8AtK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 34 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VI1BV8Doe1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.YQ2x6rOU49 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VI1BV8Doe1 ++ cat /tmp/tmp.YQ2x6rOU49 ++ rm /tmp/tmp.VI1BV8Doe1 /tmp/tmp.YQ2x6rOU49 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 35 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9f3s9njGLD +++ mktemp ++ local LAST_ERR=/tmp/tmp.RyUISB4YUx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9f3s9njGLD ++ cat /tmp/tmp.RyUISB4YUx ++ rm /tmp/tmp.9f3s9njGLD /tmp/tmp.RyUISB4YUx ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 36 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Lo5kukILZp +++ mktemp ++ local LAST_ERR=/tmp/tmp.MOvFPDyITU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Lo5kukILZp ++ cat /tmp/tmp.MOvFPDyITU ++ rm /tmp/tmp.Lo5kukILZp /tmp/tmp.MOvFPDyITU ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 37 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3vkyMlDRXe +++ mktemp ++ local LAST_ERR=/tmp/tmp.3XO0oulEn6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3vkyMlDRXe ++ cat /tmp/tmp.3XO0oulEn6 ++ rm /tmp/tmp.3vkyMlDRXe /tmp/tmp.3XO0oulEn6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 38 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RWshONaJ0r +++ mktemp ++ local LAST_ERR=/tmp/tmp.YFVTikLwlc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RWshONaJ0r ++ cat /tmp/tmp.YFVTikLwlc ++ rm /tmp/tmp.RWshONaJ0r /tmp/tmp.YFVTikLwlc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 39 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dvjXMOHZIo +++ mktemp ++ local LAST_ERR=/tmp/tmp.DlAcKlA42O ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dvjXMOHZIo ++ cat /tmp/tmp.DlAcKlA42O ++ rm /tmp/tmp.dvjXMOHZIo /tmp/tmp.DlAcKlA42O ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 40 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.thW5PHUe41 +++ mktemp ++ local LAST_ERR=/tmp/tmp.8YfjlhPaS7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.thW5PHUe41 ++ cat /tmp/tmp.8YfjlhPaS7 ++ rm /tmp/tmp.thW5PHUe41 /tmp/tmp.8YfjlhPaS7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 41 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pngjBk7uUQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.jNljVVTcB4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pngjBk7uUQ ++ cat /tmp/tmp.jNljVVTcB4 ++ rm /tmp/tmp.pngjBk7uUQ /tmp/tmp.jNljVVTcB4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 42 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.G1FF4AljgY +++ mktemp ++ local LAST_ERR=/tmp/tmp.hXL7q7Lhq2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.G1FF4AljgY ++ cat /tmp/tmp.hXL7q7Lhq2 ++ rm /tmp/tmp.G1FF4AljgY /tmp/tmp.hXL7q7Lhq2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 43 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PXzpXdVS38 +++ mktemp ++ local LAST_ERR=/tmp/tmp.6ziUO9EEA0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PXzpXdVS38 ++ cat /tmp/tmp.6ziUO9EEA0 ++ rm /tmp/tmp.PXzpXdVS38 /tmp/tmp.6ziUO9EEA0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 44 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Wr4uT5TeDl +++ mktemp ++ local LAST_ERR=/tmp/tmp.RKu6bwINT4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Wr4uT5TeDl ++ cat /tmp/tmp.RKu6bwINT4 ++ rm /tmp/tmp.Wr4uT5TeDl /tmp/tmp.RKu6bwINT4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 45 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qxLkcMdjUg +++ mktemp ++ local LAST_ERR=/tmp/tmp.kKmN4xNl4d ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qxLkcMdjUg ++ cat /tmp/tmp.kKmN4xNl4d ++ rm /tmp/tmp.qxLkcMdjUg /tmp/tmp.kKmN4xNl4d ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 46 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hd8oH5uOo8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.dTU9eeNNKk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hd8oH5uOo8 ++ cat /tmp/tmp.dTU9eeNNKk ++ rm /tmp/tmp.hd8oH5uOo8 /tmp/tmp.dTU9eeNNKk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 47 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fezGeexbfe +++ mktemp ++ local LAST_ERR=/tmp/tmp.rrzZA7kc0L ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fezGeexbfe ++ cat /tmp/tmp.rrzZA7kc0L ++ rm /tmp/tmp.fezGeexbfe /tmp/tmp.rrzZA7kc0L ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 48 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dRah7RLtTZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.WB43zYUX8i ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dRah7RLtTZ ++ cat /tmp/tmp.WB43zYUX8i ++ rm /tmp/tmp.dRah7RLtTZ /tmp/tmp.WB43zYUX8i ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 49 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BIkbeaaNgU +++ mktemp ++ local LAST_ERR=/tmp/tmp.y1ezowGysP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BIkbeaaNgU ++ cat /tmp/tmp.y1ezowGysP ++ rm /tmp/tmp.BIkbeaaNgU /tmp/tmp.y1ezowGysP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 50 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qWyBsnDXjS +++ mktemp ++ local LAST_ERR=/tmp/tmp.HSbPBzUt62 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qWyBsnDXjS ++ cat /tmp/tmp.HSbPBzUt62 ++ rm /tmp/tmp.qWyBsnDXjS /tmp/tmp.HSbPBzUt62 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 51 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JjVBVPOZFD +++ mktemp ++ local LAST_ERR=/tmp/tmp.upZQtNwtHb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JjVBVPOZFD ++ cat /tmp/tmp.upZQtNwtHb ++ rm /tmp/tmp.JjVBVPOZFD /tmp/tmp.upZQtNwtHb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
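Pieced together from the trace, the polling loop above amounts to the following. This is a sketch only: the timeout failure path and the argument defaulting are assumptions, and the post-ready checks are shown in simplified form:

wait_cluster_consistency() {
    local cluster_name=$1
    local cluster_size=$2
    local proxy_size=${3:-$2}   # trace defaults proxy size when the third arg is empty
    local i=0 max=300
    sleep 7
    echo -n "waiting for pxc/${cluster_name} to be ready"
    while [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.state}') != "ready" ]]; do
        echo -n .
        sleep 5
        if [[ $i -ge $max ]]; then
            echo "timeout waiting for pxc/${cluster_name}" >&2   # assumed failure path
            return 1
        fi
        let i+=1
    done
    # after the state flips to ready, the trace compares the reported replica counts
    [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.pxc.ready}') == "$cluster_size" ]]
    [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.haproxy.ready}') == "$proxy_size" ]]
}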
.+ sleep 5 + [[ 52 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ApRSdrWPdL +++ mktemp ++ local LAST_ERR=/tmp/tmp.aHtOKS5O47 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ApRSdrWPdL ++ cat /tmp/tmp.aHtOKS5O47 ++ rm /tmp/tmp.ApRSdrWPdL /tmp/tmp.aHtOKS5O47 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X51yFqiGXD +++ mktemp ++ local LAST_ERR=/tmp/tmp.aH8oeCVT0H ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.X51yFqiGXD ++ cat /tmp/tmp.aH8oeCVT0H ++ rm /tmp/tmp.X51yFqiGXD /tmp/tmp.aH8oeCVT0H ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.11cyQwkVAZ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ZnKLFJdJ6O +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.11cyQwkVAZ +++++ cat /tmp/tmp.ZnKLFJdJ6O +++++ rm /tmp/tmp.11cyQwkVAZ /tmp/tmp.ZnKLFJdJ6O +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NiNweEkEjM +++ mktemp ++ local LAST_ERR=/tmp/tmp.TprkosihaF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NiNweEkEjM ++ cat /tmp/tmp.TprkosihaF ++ rm /tmp/tmp.NiNweEkEjM /tmp/tmp.TprkosihaF ++ return 0 + [[ 3 == \3 ]] + echo + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OvtfsR2UfB +++ mktemp ++ local LAST_ERR=/tmp/tmp.3TQt9OrlRW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OvtfsR2UfB ++ cat /tmp/tmp.3TQt9OrlRW ++ rm /tmp/tmp.OvtfsR2UfB /tmp/tmp.3TQt9OrlRW ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.g02N5nUchr ++ mktemp + local LAST_ERR=/tmp/tmp.sxxWSe1SiK + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 
+ set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.g02N5nUchr secret/my-cluster-secrets patched + cat /tmp/tmp.sxxWSe1SiK + rm /tmp/tmp.g02N5nUchr /tmp/tmp.sxxWSe1SiK + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.phiu7H6CfF +++ mktemp ++ local LAST_ERR=/tmp/tmp.rdzE3TxXCN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.phiu7H6CfF ++ cat /tmp/tmp.rdzE3TxXCN ++ rm /tmp/tmp.phiu7H6CfF /tmp/tmp.rdzE3TxXCN ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EsV5aosqQI +++ mktemp ++ local LAST_ERR=/tmp/tmp.CUyyMwhJHb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EsV5aosqQI ++ cat /tmp/tmp.CUyyMwhJHb ++ rm /tmp/tmp.EsV5aosqQI /tmp/tmp.CUyyMwhJHb ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.TpXHIKWW4j ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.48nplpbCtX +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.TpXHIKWW4j +++++ cat /tmp/tmp.48nplpbCtX +++++ rm /tmp/tmp.TpXHIKWW4j /tmp/tmp.48nplpbCtX +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bUON40m5ti +++ mktemp ++ local LAST_ERR=/tmp/tmp.opn9n7gW1I ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bUON40m5ti ++ cat /tmp/tmp.opn9n7gW1I ++ rm /tmp/tmp.bUON40m5ti /tmp/tmp.opn9n7gW1I ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-3-80.sql ]] + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Emayqzczhv +++ mktemp ++ local LAST_ERR=/tmp/tmp.mEgBXlThvD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Emayqzczhv ++ cat /tmp/tmp.mEgBXlThvD ++ rm /tmp/tmp.Emayqzczhv /tmp/tmp.mEgBXlThvD ++ return 0 + client_pod=pxc-client-59944c5bbf-6wwzl + wait_pod pxc-client-59944c5bbf-6wwzl + local pod=pxc-client-59944c5bbf-6wwzl + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-6wwzl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-6wwzl condition met waiting for pod/pxc-client-59944c5bbf-6wwzl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.eAHkEcbyFX/select-3.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.eAHkEcbyFX/select-3.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2289/e2e-tests/users/compare/select-3.sql /tmp/tmp.eAHkEcbyFX/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UsXpKwjVBi +++ mktemp ++ local LAST_ERR=/tmp/tmp.AWCW4B506V ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UsXpKwjVBi ++ cat /tmp/tmp.AWCW4B506V ++ rm /tmp/tmp.UsXpKwjVBi /tmp/tmp.AWCW4B506V ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + destroy users-28609 + local namespace=users-28609 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' + grep -v 'get backup status: Job.batch' + sort -u ++ get_operator_pod + tee /tmp/tmp.eAHkEcbyFX/operator.log ++ local label_prefix=app.kubernetes.io/ + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.EZJl5QtpBe +++ mktemp ++ local LAST_ERR=/tmp/tmp.q5X40AuHp8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods 
--selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EZJl5QtpBe ++ cat /tmp/tmp.q5X40AuHp8 ++ rm /tmp/tmp.EZJl5QtpBe /tmp/tmp.q5X40AuHp8 ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-56f8579949-89wjt ++ mktemp + local LAST_OUT=/tmp/tmp.dfXsikHaUt ++ mktemp + local LAST_ERR=/tmp/tmp.RaWt0IOLeF + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-56f8579949-89wjt + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dfXsikHaUt + cat /tmp/tmp.RaWt0IOLeF + rm /tmp/tmp.dfXsikHaUt /tmp/tmp.RaWt0IOLeF + return 0 [fragments of what appears to be a go-cmp object diff, scrambled by the sort -u in the log filter; the one recoverable change is a config hash "1a1ee814a764af973da75804b6d460e2c4c5723ab4436ab4f9e48e078bdc880" -> "1a1ee814a764af973da75804b6d460e2c4c5723ab4436ab4f9e48e078bdc880b"] 2025-12-05T13:14:15.048Z INFO setup Manager starting up {"gitCommit": "047564f967d22212a5c44e1d3a8bd21338781593", "gitBranch": "PR-2289-047564f9", "buildTime": "2025-12-05T10:07:08Z", "goVersion": "go1.25.5", "os": "linux", "arch": "amd64"} 2025-12-05T13:14:15.048Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.14-gke.1033000"} 2025-12-05T13:14:15.051Z INFO setup Registering Components. 2025-12-05T13:14:15.613Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-12-05T13:14:15.613Z INFO controller-runtime.metrics Starting metrics server 2025-12-05T13:14:15.613Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-12-05T13:14:15.613Z INFO controller-runtime.webhook Starting webhook server 2025-12-05T13:14:15.613Z INFO setup Starting the Cmd. 2025-12-05T13:14:15.613Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-12-05T13:14:15.614Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-12-05T13:14:15.614Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-12-05T13:14:15.614Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-12-05T13:14:15.715Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 
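The operator log (continuing below) was captured through the filter pipeline traced above. Reconstructed as a standalone command, with the stage order an assumption since the xtrace interleaves the pipeline legs:

kubectl logs -n pxc-operator percona-xtradb-cluster-operator-56f8579949-89wjt \
    | grep -v level=info \
    | grep -v 'the object has been modified' \
    | grep -v 'get backup status: Job.batch' \
    | /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
    | sort -u \
    | tee /tmp/tmp.eAHkEcbyFX/operator.log

Stripping the "ts" field before sort -u is what lets repeated reconcile messages collapse to one line; it is likely also what scrambled the multi-line object diff noted above.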
2025-12-05T13:14:15.768Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2025-12-05T13:14:15.769Z DEBUG events percona-xtradb-cluster-operator-56f8579949-89wjt_b935013b-1049-4cab-99bf-0b1a5dd966c0 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"1c50da0b-a025-4420-80d7-2dee8712a7c7","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1764940455762383009"}, "reason": "LeaderElection"} 2025-12-05T13:14:15.769Z INFO Starting EventSource {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-12-05T13:14:15.769Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-12-05T13:14:15.769Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.Secret"} 2025-12-05T13:14:15.769Z INFO Starting EventSource {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-12-05T13:14:15.870Z INFO Starting Controller {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup"} 2025-12-05T13:14:15.870Z INFO Starting Controller {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster"} 2025-12-05T13:14:15.870Z INFO Starting Controller {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore"} 2025-12-05T13:14:15.870Z INFO Starting workers {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "worker count": 1} 2025-12-05T13:14:15.870Z INFO Starting workers {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "worker count": 1} 2025-12-05T13:14:15.870Z INFO Starting workers {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "worker count": 1} 2025-12-05T13:14:42.794Z INFO Set CR version {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "9dd88962-d2d1-40c9-a508-6542b97546de", "version": "1.19.0"} 2025-12-05T13:14:42.927Z INFO User secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "9dd88962-d2d1-40c9-a508-6542b97546de", "secrets": "my-cluster-secrets"} 2025-12-05T13:14:43.144Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "9dd88962-d2d1-40c9-a508-6542b97546de", "object": "auto-some-name-pxc", "kind": 
"&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-12-05T13:14:43.266Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "9dd88962-d2d1-40c9-a508-6542b97546de", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-12-05T13:14:43.311Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "9dd88962-d2d1-40c9-a508-6542b97546de", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-12-05T13:14:43.368Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "9dd88962-d2d1-40c9-a508-6542b97546de", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-05T13:14:43.402Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "9dd88962-d2d1-40c9-a508-6542b97546de", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-05T13:14:43.450Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "9dd88962-d2d1-40c9-a508-6542b97546de", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-05T13:14:43.567Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "9dd88962-d2d1-40c9-a508-6542b97546de", "object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-05T13:14:44.402Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "7ccb30ca-0a1e-4398-92fa-6bb328ef2932", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-12-05T13:14:44.420Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "7ccb30ca-0a1e-4398-92fa-6bb328ef2932", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-12-05T13:16:00.848Z INFO Password expiration policy updated {"controller": "pxc-controller", 
"controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "060463c9-08f0-4fc7-96a9-822f4a081a23", "user": "operator"} 2025-12-05T13:16:00.883Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "060463c9-08f0-4fc7-96a9-822f4a081a23", "user": "monitor"} 2025-12-05T13:16:00.934Z INFO User monitor: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "060463c9-08f0-4fc7-96a9-822f4a081a23"} 2025-12-05T13:16:00.968Z INFO monitor user privileges granted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "060463c9-08f0-4fc7-96a9-822f4a081a23"} 2025-12-05T13:16:00.998Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "060463c9-08f0-4fc7-96a9-822f4a081a23", "user": "xtrabackup"} 2025-12-05T13:16:01.039Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "060463c9-08f0-4fc7-96a9-822f4a081a23"} 2025-12-05T13:16:01.070Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "060463c9-08f0-4fc7-96a9-822f4a081a23", "user": "replication"} 2025-12-05T13:16:01.078Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "060463c9-08f0-4fc7-96a9-822f4a081a23", "err": "get primary pxc pod: not found"} 2025-12-05T13:16:05.774Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "8675a1e2-66a1-4cb0-9805-32ddd2fe7cf3", "err": "get primary pxc pod: not found"} 2025-12-05T13:16:10.919Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": 
"2002af82-6251-490e-ac54-4e785c773d10", "err": "get primary pxc pod: not found"} 2025-12-05T13:16:16.044Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "4af6a8d7-503a-4081-a137-ea4a27a3704b", "err": "get primary pxc pod: not found"} 2025-12-05T13:18:27.388Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "b9ac966c-5641-4a29-88f6-c2342b8801eb", "user": "root"} 2025-12-05T13:18:27.497Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "b9ac966c-5641-4a29-88f6-c2342b8801eb", "new version": "8.0.43-34.1"} 2025-12-05T13:18:29.231Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "b9ac966c-5641-4a29-88f6-c2342b8801eb"} 2025-12-05T13:18:34.333Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "623e4260-bebb-4acb-b595-53c864487977"} 2025-12-05T13:18:39.704Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "54bafa2a-4349-4202-b573-9761d4bc4dee"} 2025-12-05T13:18:45.033Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "972b93c6-9321-43ee-b1e4-52a8d82bcde6"} 2025-12-05T13:18:49.913Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "ab380024-d128-4ae5-be53-3f943c2e15ba"} 2025-12-05T13:18:55.713Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "f3271841-2d56-448f-b2ba-ee987e94dffa"} 2025-12-05T13:19:01.130Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "37b0fa23-bd4c-403d-9ec6-e5149d48f470"} 2025-12-05T13:19:06.710Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "1501b3c4-2089-4d5c-85b7-85705418b8a6"} 2025-12-05T13:19:11.719Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "3c418b85-f157-486f-8226-3ed9dab48f37"} 2025-12-05T13:19:17.052Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "bad21c54-7721-4bcc-a4f3-420c0dc820d0"} 2025-12-05T13:19:22.526Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "7125d17c-30d4-49b2-a096-679c788eb4bc"} 2025-12-05T13:19:27.915Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "4d61fc67-addd-44b7-a13f-26abde0fc82b"} 2025-12-05T13:19:33.420Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "22ed16e8-29cd-4982-b7d0-4166f62df355"} 2025-12-05T13:19:39.027Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "58c40f1d-92a7-4e5a-bd23-3e4825574f6f"} 2025-12-05T13:19:44.103Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "45ec40f6-55de-44f4-b36f-0f2dee4e6810"} 2025-12-05T13:19:46.416Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "4c4f7aee-6df3-4693-a9f8-a655779a6bfc", "user": "root"} 2025-12-05T13:19:46.440Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "4c4f7aee-6df3-4693-a9f8-a655779a6bfc", "user": "root"} 2025-12-05T13:19:46.458Z INFO MySQL init secret created {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "4c4f7aee-6df3-4693-a9f8-a655779a6bfc", "secret": "some-name-mysql-init", "user": "root"} 2025-12-05T13:19:49.447Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "4c4f7aee-6df3-4693-a9f8-a655779a6bfc"} 2025-12-05T13:19:49.470Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "4c4f7aee-6df3-4693-a9f8-a655779a6bfc", "user": "root"} 2025-12-05T13:19:49.490Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "4c4f7aee-6df3-4693-a9f8-a655779a6bfc", "user": "root"} 2025-12-05T13:19:51.409Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "4c4f7aee-6df3-4693-a9f8-a655779a6bfc"} 2025-12-05T13:19:56.524Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "f573a3eb-5a57-4750-8d26-77660b02ab50"} 2025-12-05T13:20:01.926Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "0bff5378-f066-4cca-9efa-fa1f38c5f49a"} 2025-12-05T13:20:05.985Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "188ac839-3e4f-4f52-921c-9308825a3771", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-05T13:20:07.393Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "79e9f812-ec03-4e9d-8740-4d04f9ec68d4"} 2025-12-05T13:20:31.210Z INFO Password changed, 
updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6bfeb030-d51d-4098-838e-74b1bfe06787", "user": "proxyadmin"} 2025-12-05T13:20:31.210Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6bfeb030-d51d-4098-838e-74b1bfe06787", "user": "proxyadmin"} 2025-12-05T13:20:31.238Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6bfeb030-d51d-4098-838e-74b1bfe06787", "user": "proxyadmin"} 2025-12-05T13:20:31.266Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6bfeb030-d51d-4098-838e-74b1bfe06787", "user": "proxyadmin"} 2025-12-05T13:20:31.266Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6bfeb030-d51d-4098-838e-74b1bfe06787", "last-applied-secret": "a2a4640afab8db39535e5b142a78df185b4129cbe3a31e6c4524281285f66748"} 2025-12-05T13:20:31.270Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6bfeb030-d51d-4098-838e-74b1bfe06787", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-05T13:20:31.510Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "21542f6e-8847-43b0-80a8-a6b951161a64", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 137 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 137 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR 
(line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-05T13:20:48.084Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "ce08be1d-9a7b-4d4a-b0e5-bfab8033a20d", "err": "get primary pxc pod: not found"} 2025-12-05T13:21:13.010Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "550bfc91-a8b2-4b6a-b27c-de6ff51c36bf"} 2025-12-05T13:21:16.408Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "822bccde-82d9-43b7-ae86-7a746b6533c3", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-05T13:21:16.462Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "822bccde-82d9-43b7-ae86-7a746b6533c3", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-05T13:21:18.498Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "b6974f5b-19c6-4631-be9b-b72bdc922acf", "user": "xtrabackup"} 2025-12-05T13:21:18.511Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "b6974f5b-19c6-4631-be9b-b72bdc922acf", "user": "xtrabackup"} 2025-12-05T13:21:18.535Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "b6974f5b-19c6-4631-be9b-b72bdc922acf", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-12-05T13:21:18.556Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", 
"controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "b6974f5b-19c6-4631-be9b-b72bdc922acf", "user": "xtrabackup"} 2025-12-05T13:21:18.569Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "b6974f5b-19c6-4631-be9b-b72bdc922acf", "user": "xtrabackup"} 2025-12-05T13:21:18.576Z INFO PXC pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "b6974f5b-19c6-4631-be9b-b72bdc922acf", "last-applied-secret": "9e89dc8b254dfebd0af02bc9edc244e8d08cc5789e09ef2a8eb6b8d057138482"} 2025-12-05T13:21:18.580Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "b6974f5b-19c6-4631-be9b-b72bdc922acf", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-05T13:21:18.887Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "456c5eed-2757-458f-a944-50de06e8b67b", "error": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-2\" not found / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-2\" not found / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-05T13:21:23.725Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "b6974f5b-19c6-4631-be9b-b72bdc922acf", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-2: dial tcp: lookup some-name-pxc-2.some-name-pxc.users-28609 on 34.118.224.10:53: no such host"} 2025-12-05T13:22:06.198Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "21b977de-963d-4ff0-92f4-cf5f64d65afc", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup 
2025-12-05T13:22:11.632Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "ab222faa-b744-47aa-b099-2a759074ed3c", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-28609 on 34.118.224.10:53: no such host"}
2025-12-05T13:22:17.460Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "0d3c4325-1de9-4d59-b84f-eb250fde4d06", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-28609 on 34.118.224.10:53: no such host"}
2025-12-05T13:23:09.766Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "8e286cf9-5d06-40a9-a023-fad1291baca6", "primary name": "some-name-pxc-0.some-name-pxc.users-28609.svc.cluster.local"}
2025-12-05T13:23:14.980Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "5833a3ab-501f-40ad-962b-7e837ce7b14f", "primary name": "some-name-pxc-0.some-name-pxc.users-28609.svc.cluster.local"}
2025-12-05T13:23:20.185Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "39ae997a-7743-47cc-9979-093551352767", "primary name": "some-name-pxc-0.some-name-pxc.users-28609.svc.cluster.local"}
2025-12-05T13:23:25.343Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "71e4efe7-5e91-40f3-b67e-c2252bbb8b37", "primary name": "some-name-pxc-0.some-name-pxc.users-28609.svc.cluster.local"}
2025-12-05T13:23:30.496Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "8d436bb3-7380-4050-abd5-90067ed47d93", "primary name": "some-name-pxc-0.some-name-pxc.users-28609.svc.cluster.local"}
2025-12-05T13:23:35.648Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "716ef89f-280a-4301-a3f5-295b992b4b80", "primary name": "some-name-pxc-0.some-name-pxc.users-28609.svc.cluster.local"}
2025-12-05T13:23:40.790Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "422fa264-4d0f-49fe-a286-86c8c4e88f31", "primary name": "some-name-pxc-0.some-name-pxc.users-28609.svc.cluster.local"}
2025-12-05T13:23:45.936Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "1c64fec8-e156-4265-8a07-8bc1126a6c8c", "primary name": "some-name-pxc-0.some-name-pxc.users-28609.svc.cluster.local"}
2025-12-05T13:23:54.325Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "190a0ec5-7cb1-45ce-8e2c-1f96067939e2"}
2025-12-05T13:23:58.467Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c7667c9f-7931-4bb4-bce2-bf1ce2b7262e", "user": "monitor"}
2025-12-05T13:23:58.485Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c7667c9f-7931-4bb4-bce2-bf1ce2b7262e", "user": "monitor"}
2025-12-05T13:23:58.504Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c7667c9f-7931-4bb4-bce2-bf1ce2b7262e", "secret": "some-name-mysql-init", "user": "monitor"}
2025-12-05T13:23:58.523Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c7667c9f-7931-4bb4-bce2-bf1ce2b7262e", "user": "monitor"}
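-----------------------------------------------------------------------------------
note: the repeated Unable to find primary pod messages above mean the operator resolved the primary's headless-service name while some-name-pxc-0 was down, so no running pod matched it; they clear once the pod rejoins
-----------------------------------------------------------------------------------
# Sketch of a manual check from another pod in the namespace (nslookup being
# present in the proxysql image is an assumption):
kubectl exec -n users-28609 some-name-proxysql-0 -c proxysql -- \
  nslookup some-name-pxc-0.some-name-pxc.users-28609.svc.cluster.local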
"users-28609", "name": "some-name", "reconcileID": "c7667c9f-7931-4bb4-bce2-bf1ce2b7262e", "user": "monitor"} 2025-12-05T13:23:58.544Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c7667c9f-7931-4bb4-bce2-bf1ce2b7262e", "user": "monitor"} 2025-12-05T13:23:58.892Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c7667c9f-7931-4bb4-bce2-bf1ce2b7262e", "last-applied-secret": "8dabca2723fabb6ac6faf31c1e85406c93850e24d53c55bc567474bffadec216"} 2025-12-05T13:23:58.896Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c7667c9f-7931-4bb4-bce2-bf1ce2b7262e", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-05T13:24:00.374Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6ae78f7b-5fde-442e-ad01-9ddf21ea76ee", "error": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-1\" not found / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-28609.svc.cluster.local:3306) to ProxySQL\nRemoving existing user from ProxySQL: xtrabackup\nAdding user to ProxySQL: xtrabackup\n Added query rule for user: xtrabackup\nRemoving existing user from ProxySQL: root\nAdding user to ProxySQL: root\n / ", "errorVerbose": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-1\" not found / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-28609.svc.cluster.local:3306) to ProxySQL\nRemoving existing user from ProxySQL: xtrabackup\nAdding user to ProxySQL: xtrabackup\n Added query rule for user: xtrabackup\nRemoving existing user from ProxySQL: root\nAdding user to ProxySQL: root\n / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-05T13:24:41.377Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "2bd9cd95-33c0-42a6-b98a-94996a396f26", "user": "monitor"} 2025-12-05T13:24:44.084Z DEBUG PXC users synced with ProxySQL {"controller": 
"pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "2bd9cd95-33c0-42a6-b98a-94996a396f26"} 2025-12-05T13:24:46.384Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6526cb14-fde5-4cd6-993b-6bf67cf4809e", "user": "monitor"} 2025-12-05T13:24:51.368Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6526cb14-fde5-4cd6-993b-6bf67cf4809e"} 2025-12-05T13:24:55.084Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "d1a295af-ad3f-43f7-9a6e-f39abd4d9991", "user": "monitor"} 2025-12-05T13:24:57.283Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "d1a295af-ad3f-43f7-9a6e-f39abd4d9991"} 2025-12-05T13:25:00.737Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "80e19274-3a8e-4017-adc1-73749cbbb9fc", "user": "monitor"} 2025-12-05T13:25:03.161Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "80e19274-3a8e-4017-adc1-73749cbbb9fc"} 2025-12-05T13:25:06.317Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "5c3352ac-6d1c-4dea-a252-72ecae24e4bf", "user": "monitor"} 2025-12-05T13:25:07.182Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "5c3352ac-6d1c-4dea-a252-72ecae24e4bf", "user": "monitor"} 2025-12-05T13:25:07.199Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": 
"5c3352ac-6d1c-4dea-a252-72ecae24e4bf", "last-applied-secret": "8dabca2723fabb6ac6faf31c1e85406c93850e24d53c55bc567474bffadec216"} 2025-12-05T13:25:08.779Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "5c3352ac-6d1c-4dea-a252-72ecae24e4bf"} 2025-12-05T13:25:14.370Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "53bb083f-317b-4db3-bf68-2ca0cd80b181"} 2025-12-05T13:25:19.479Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "888bc010-615c-49cd-b59b-37f2fe2632fb"} 2025-12-05T13:25:24.883Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "20708ad1-9dcb-44ec-8167-f6c74f0a6c9e"} 2025-12-05T13:25:30.371Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "374f53b2-da23-4ed4-a67b-640f1e5608f3"} 2025-12-05T13:25:31.366Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "5c742aa3-0f87-45e2-a625-5c1eb4fe8a28", "user": "operator"} 2025-12-05T13:25:31.382Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "5c742aa3-0f87-45e2-a625-5c1eb4fe8a28", "user": "operator"} 2025-12-05T13:25:31.418Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "5c742aa3-0f87-45e2-a625-5c1eb4fe8a28", "secret": "some-name-mysql-init", "user": "operator"} 2025-12-05T13:25:31.442Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "5c742aa3-0f87-45e2-a625-5c1eb4fe8a28", "user": "operator"} 2025-12-05T13:25:31.457Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", 
"PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "5c742aa3-0f87-45e2-a625-5c1eb4fe8a28", "user": "operator"} 2025-12-05T13:25:31.477Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "5c742aa3-0f87-45e2-a625-5c1eb4fe8a28", "last-applied-secret": "ec5ac83f77928a209a8e6815bcad9ecb1b05824aa0d3b2f290000ad4fbf9a47b"} 2025-12-05T13:25:31.481Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "5c742aa3-0f87-45e2-a625-5c1eb4fe8a28", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-05T13:25:35.075Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "5c742aa3-0f87-45e2-a625-5c1eb4fe8a28", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' 
(using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' 
(using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-05T13:25:52.110Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "01cf23e7-c91e-4278-8212-cf3ab2444136"} 2025-12-05T13:25:56.582Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "8bfee7c6-ec21-41e5-9fec-cf70470531b2"} 2025-12-05T13:26:02.280Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "4ab61bc3-f359-44cb-bb3e-9b4de5041574"} 2025-12-05T13:26:07.208Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "cd474b33-32ee-4ef4-b443-ce19ddc18995"} 2025-12-05T13:26:10.283Z INFO Created user secrets {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "secrets": "my-cluster-secrets-2"} 2025-12-05T13:26:10.289Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "user": "root"} 2025-12-05T13:26:10.316Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "user": "root"} 2025-12-05T13:26:10.333Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "secret": "some-name-mysql-init", "user": "root"} 2025-12-05T13:26:12.765Z DEBUG PXC users synced with 
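-----------------------------------------------------------------------------------
note: Created user secrets ... my-cluster-secrets-2 marks the point where the test repointed the cluster at a Secret that did not exist yet, so the operator generated fresh passwords and rotates every system user in one reconcile (root, operator, monitor, xtrabackup, replication, proxyadmin below)
-----------------------------------------------------------------------------------
# Sketch of the switch; spec.secretsName is the CR field, the rest of the
# invocation is assumed from context:
kubectl patch pxc some-name -n users-28609 --type=merge \
  -p '{"spec":{"secretsName":"my-cluster-secrets-2"}}'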
ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671"} 2025-12-05T13:26:12.786Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "user": "root"} 2025-12-05T13:26:12.806Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "user": "root"} 2025-12-05T13:26:12.814Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "user": "operator"} 2025-12-05T13:26:12.830Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "user": "operator"} 2025-12-05T13:26:12.851Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "secret": "some-name-mysql-init", "user": "operator"} 2025-12-05T13:26:12.883Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "user": "operator"} 2025-12-05T13:26:12.899Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "user": "operator"} 2025-12-05T13:26:12.907Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "user": "monitor"} 2025-12-05T13:26:12.924Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": 
"c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "user": "monitor"} 2025-12-05T13:26:12.946Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "secret": "some-name-mysql-init", "user": "monitor"} 2025-12-05T13:26:12.964Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "user": "monitor"} 2025-12-05T13:26:12.987Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "user": "monitor"} 2025-12-05T13:26:13.298Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "user": "xtrabackup"} 2025-12-05T13:26:13.314Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "user": "xtrabackup"} 2025-12-05T13:26:13.331Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-12-05T13:26:13.351Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "user": "xtrabackup"} 2025-12-05T13:26:13.364Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "user": "xtrabackup"} 2025-12-05T13:26:13.369Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "user": "replication"} 2025-12-05T13:26:13.384Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": 
"PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "user": "replication"} 2025-12-05T13:26:13.403Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "secret": "some-name-mysql-init", "user": "replication"} 2025-12-05T13:26:13.429Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "user": "replication"} 2025-12-05T13:26:13.444Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "user": "replication"} 2025-12-05T13:26:13.444Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "user": "proxyadmin"} 2025-12-05T13:26:13.462Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "user": "proxyadmin"} 2025-12-05T13:26:13.483Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "user": "proxyadmin"} 2025-12-05T13:26:13.483Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "last-applied-secret": "dd4d0cf46948def1d20931265f1f0f2b55997c4beaf142bb5f5042be3005b962"} 2025-12-05T13:26:13.483Z INFO PXC pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "last-applied-secret": "dd4d0cf46948def1d20931265f1f0f2b55997c4beaf142bb5f5042be3005b962"} 2025-12-05T13:26:13.486Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-05T13:26:13.553Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-05T13:26:14.912Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "711c0bc6-2ee3-4d0b-a8da-da7b6a98c3a1", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-2: dial tcp: lookup some-name-pxc-2.some-name-pxc.users-28609 on 34.118.224.10:53: no such host"} 2025-12-05T13:26:15.704Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c14b0f9d-e41c-43d7-80db-9f6abdd8d671", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-05T13:27:02.556Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "0ed4a7cd-0a77-47e9-b427-5339d39eaa20", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-28609 on 34.118.224.10:53: no such host"} 2025-12-05T13:27:11.990Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "d1ba0c9f-88d6-4976-a79e-7554281b9191", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.137.104.70:33062: connect: connection refused"} 2025-12-05T13:27:59.453Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "730ff403-d416-4070-940c-599a2aa36c16", "primary name": "some-name-pxc-0.some-name-pxc.users-28609.svc.cluster.local"} 2025-12-05T13:28:15.002Z INFO Unable to find primary pod for replication. 
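-----------------------------------------------------------------------------------
note: the two reconcile replication error variants above track pod restart stages: dial tcp: lookup ... no such host means the pod's DNS record is gone (pod deleted), while dial tcp 10.137.104.70:33062: connect: connection refused means the pod has an IP but mysqld is not yet listening on its admin port 33062
-----------------------------------------------------------------------------------
# Sketch: probe the admin port directly via the bash /dev/tcp device (shell
# support inside the pxc container is an assumption):
kubectl exec -n users-28609 some-name-pxc-0 -c pxc -- \
  bash -c 'echo > /dev/tcp/some-name-pxc-1.some-name-pxc/33062 && echo open || echo closed'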
2025-12-05T13:28:36.518Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "ca29f1c4-5826-4a28-bf99-8976457ff723", "user": "monitor"}
2025-12-05T13:28:37.405Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "ca29f1c4-5826-4a28-bf99-8976457ff723", "user": "monitor"}
2025-12-05T13:28:37.420Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "ca29f1c4-5826-4a28-bf99-8976457ff723", "last-applied-secret": "dd4d0cf46948def1d20931265f1f0f2b55997c4beaf142bb5f5042be3005b962"}
2025-12-05T13:28:41.303Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "ca29f1c4-5826-4a28-bf99-8976457ff723"}
2025-12-05T13:28:43.304Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "17a5eacc-b9d7-4e32-9561-426082d7a3a8"}
2025-12-05T13:28:43.333Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "05f8db98-28e7-4320-ab0e-1e63a6d7a0eb", "user": "operator"}
2025-12-05T13:28:43.347Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "05f8db98-28e7-4320-ab0e-1e63a6d7a0eb", "user": "operator"}
2025-12-05T13:28:43.367Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "05f8db98-28e7-4320-ab0e-1e63a6d7a0eb", "secret": "some-name-mysql-init", "user": "operator"}
2025-12-05T13:28:43.386Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "05f8db98-28e7-4320-ab0e-1e63a6d7a0eb", "user": "operator"}
2025-12-05T13:28:43.400Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "05f8db98-28e7-4320-ab0e-1e63a6d7a0eb", "user": "operator"}
2025-12-05T13:28:43.421Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "05f8db98-28e7-4320-ab0e-1e63a6d7a0eb", "last-applied-secret": "b5416c150db08307f703f5722953a4afe339dfb2dc78b81f15af58338d9bb63e"}
2025-12-05T13:28:43.425Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "05f8db98-28e7-4320-ab0e-1e63a6d7a0eb", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-12-05T13:28:46.744Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "05f8db98-28e7-4320-ab0e-1e63a6d7a0eb", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28609.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
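The "sync users" failure above follows directly from the rotation logged just before it: the operator password was changed and "Proxy pods will be restarted" was announced, but the syncusers script exec'd inside the still-running ProxySQL pod carries the previous credentials, hence the burst of ERROR 1045 for 'operator' until the StatefulSet finishes rolling. A hedged sketch of the kind of connection check that fails here, assuming ProxySQL's admin interface on port 6032 (the DSN handling is illustrative, not the operator's code):

    package main

    import (
        "database/sql"
        "fmt"

        _ "github.com/go-sql-driver/mysql"
    )

    // checkProxyAdmin pings ProxySQL's admin interface the way a connection
    // check would before syncing users. Credentials and address are illustrative.
    func checkProxyAdmin(user, pass, host string) error {
        dsn := fmt.Sprintf("%s:%s@tcp(%s:6032)/?timeout=2s", user, pass, host)
        db, err := sql.Open("mysql", dsn)
        if err != nil {
            return err
        }
        defer db.Close()
        // ERROR 1045 here means the pod is still running with the pre-rotation password.
        return db.Ping()
    }

    func main() {
        if err := checkProxyAdmin("proxyadmin", "not-yet-propagated", "127.0.0.1"); err != nil {
            fmt.Println("proxysql admin not ready:", err)
        }
    }

Because the resync goroutine retries on a timer, these errors clear once the ProxySQL pods restart with the updated secret, as the string of "PXC users synced with ProxySQL" entries below confirms.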
{"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "600d9f79-33ff-4b87-a607-8a9a2982bb4f"} 2025-12-05T13:30:10.434Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "5fb434dd-ce86-4aa4-b4d5-389501a13e35"} 2025-12-05T13:30:16.027Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "7a4a88b7-bc02-4e2a-a34a-7f1e9d892277"} 2025-12-05T13:30:21.324Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "c445ffa7-473c-4c46-9b27-768515b30bc9"} 2025-12-05T13:30:26.709Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "8d08a289-9466-47f6-a38d-865fac72fd41"} 2025-12-05T13:30:32.080Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "f98083f0-f603-4b87-847f-7b0c94b13980"} 2025-12-05T13:30:37.299Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "3bc23d6a-f669-4794-b0d6-520c9587c871"} 2025-12-05T13:30:42.688Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "beb87d1e-e7aa-46c0-b649-7337f8f33dff"} 2025-12-05T13:30:44.880Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6a7e1e2c-9348-4596-a217-d4aa2845cdf3", "user": "root"} 2025-12-05T13:30:44.902Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6a7e1e2c-9348-4596-a217-d4aa2845cdf3", "user": "root"} 2025-12-05T13:30:44.928Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6a7e1e2c-9348-4596-a217-d4aa2845cdf3", "secret": "some-name-mysql-init", "user": "root"} 2025-12-05T13:30:47.361Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6a7e1e2c-9348-4596-a217-d4aa2845cdf3"} 2025-12-05T13:30:47.551Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6a7e1e2c-9348-4596-a217-d4aa2845cdf3", "user": "root"} 2025-12-05T13:30:47.571Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6a7e1e2c-9348-4596-a217-d4aa2845cdf3", "user": "root"} 2025-12-05T13:30:47.586Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6a7e1e2c-9348-4596-a217-d4aa2845cdf3", "user": "monitor"} 2025-12-05T13:30:47.601Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6a7e1e2c-9348-4596-a217-d4aa2845cdf3", "user": "monitor"} 2025-12-05T13:30:47.671Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6a7e1e2c-9348-4596-a217-d4aa2845cdf3", "secret": "some-name-mysql-init", "user": "monitor"} 2025-12-05T13:30:47.689Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6a7e1e2c-9348-4596-a217-d4aa2845cdf3", "user": "monitor"} 2025-12-05T13:30:47.764Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6a7e1e2c-9348-4596-a217-d4aa2845cdf3", "user": "monitor"} 2025-12-05T13:30:48.116Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6a7e1e2c-9348-4596-a217-d4aa2845cdf3", "user": "xtrabackup"} 2025-12-05T13:30:48.133Z INFO Password updated {"controller": 
"pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6a7e1e2c-9348-4596-a217-d4aa2845cdf3", "user": "xtrabackup"} 2025-12-05T13:30:48.162Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6a7e1e2c-9348-4596-a217-d4aa2845cdf3", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-12-05T13:30:48.216Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6a7e1e2c-9348-4596-a217-d4aa2845cdf3", "user": "xtrabackup"} 2025-12-05T13:30:48.229Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6a7e1e2c-9348-4596-a217-d4aa2845cdf3", "user": "xtrabackup"} 2025-12-05T13:30:48.237Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6a7e1e2c-9348-4596-a217-d4aa2845cdf3", "user": "proxyadmin"} 2025-12-05T13:30:48.255Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6a7e1e2c-9348-4596-a217-d4aa2845cdf3", "user": "proxyadmin"} 2025-12-05T13:30:48.291Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6a7e1e2c-9348-4596-a217-d4aa2845cdf3", "user": "proxyadmin"} 2025-12-05T13:30:48.291Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6a7e1e2c-9348-4596-a217-d4aa2845cdf3", "last-applied-secret": "1a1ee814a764af973da75804b6d460e2c4c5723ab4436ab4f9e48e078bdc880b"} 2025-12-05T13:30:48.291Z INFO PXC pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6a7e1e2c-9348-4596-a217-d4aa2845cdf3", "last-applied-secret": "1a1ee814a764af973da75804b6d460e2c4c5723ab4436ab4f9e48e078bdc880b"} 2025-12-05T13:30:48.294Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": 
"PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6a7e1e2c-9348-4596-a217-d4aa2845cdf3", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-05T13:30:48.368Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6a7e1e2c-9348-4596-a217-d4aa2845cdf3", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-05T13:30:50.627Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "6a7e1e2c-9348-4596-a217-d4aa2845cdf3", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-05T13:31:07.337Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "da3b362a-c5fb-4cfa-a37e-3637dcb08d25", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-05T13:31:07.388Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "da3b362a-c5fb-4cfa-a37e-3637dcb08d25", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-12-05T13:31:07.466Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "da3b362a-c5fb-4cfa-a37e-3637dcb08d25", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-12-05T13:31:07.641Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "da3b362a-c5fb-4cfa-a37e-3637dcb08d25", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-05T13:31:07.731Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "da3b362a-c5fb-4cfa-a37e-3637dcb08d25", "object": "some-name-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-05T13:31:08.738Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
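The 13:30:44-13:30:48 run above is one full pass of the users test: root, monitor, xtrabackup, and proxyadmin are rotated in sequence, and each "pods will be restarted" line carries a new last-applied-secret fingerprint. When that fingerprint no longer matches the one stamped on the StatefulSet pod template, the operator updates the object ("Updating object ... hashChanged: true") and the pods roll; the proxyadmin ERROR 1045 burst immediately after is the window before the ProxySQL pods pick up the new admin password. A minimal sketch of the fingerprint-and-compare pattern (the hashing scheme here is illustrative, not necessarily the operator's exact encoding):

    package main

    import (
        "crypto/sha256"
        "encoding/json"
        "fmt"
    )

    // secretFingerprint illustrates the "last-applied-secret" idea: hash the
    // secret's data, stamp the result on the pod template as an annotation,
    // and let any credential change flip the hash and force a rolling restart.
    func secretFingerprint(data map[string][]byte) string {
        j, _ := json.Marshal(data) // encoding/json emits map keys in sorted order
        return fmt.Sprintf("%x", sha256.Sum256(j))
    }

    func main() {
        before := secretFingerprint(map[string][]byte{"monitor": []byte("old-pass")})
        after := secretFingerprint(map[string][]byte{"monitor": []byte("new-pass")})
        fmt.Println(before != after) // true -> "Proxy pods will be restarted"
    }

Annotation-driven restarts like this are a common Kubernetes pattern because the StatefulSet controller treats any pod-template change, including an annotation, as a reason to roll the pods.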
{"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "4cf0a3a8-ed31-470c-98da-c1de6f13ced5", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-12-05T13:32:29.539Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "8ad35790-7ce3-45a5-bdad-622d83019829", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-28609 on 34.118.224.10:53: no such host"} 2025-12-05T13:32:34.480Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "705f2e24-3a8a-4e4c-ae2f-83779a2cb5f3", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-28609 on 34.118.224.10:53: no such host"} 2025-12-05T13:33:16.214Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "ccc3b785-5bb4-477b-96b2-f9968d54c1a1", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-28609 on 34.118.224.10:53: no such host"} 2025-12-05T13:33:21.512Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "204048c7-2078-44a8-a049-200071ab786b", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-28609 on 34.118.224.10:53: no such host"} 2025-12-05T13:34:03.936Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", "user": "root"} 2025-12-05T13:34:03.959Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", "user": "root"} 2025-12-05T13:34:03.980Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", "secret": "some-name-mysql-init", "user": "root"} 2025-12-05T13:34:04.001Z INFO Internal secrets updated {"controller": "pxc-controller", 
"controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", "user": "root"} 2025-12-05T13:34:04.023Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", "user": "root"} 2025-12-05T13:34:04.030Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", "user": "operator"} 2025-12-05T13:34:04.044Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", "user": "operator"} 2025-12-05T13:34:04.073Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", "secret": "some-name-mysql-init", "user": "operator"} 2025-12-05T13:34:04.097Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", "user": "operator"} 2025-12-05T13:34:04.110Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", "user": "operator"} 2025-12-05T13:34:04.117Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", "user": "monitor"} 2025-12-05T13:34:04.133Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", "user": "monitor"} 2025-12-05T13:34:04.155Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", 
"secret": "some-name-mysql-init", "user": "monitor"} 2025-12-05T13:34:04.193Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", "user": "monitor"} 2025-12-05T13:34:04.477Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", "user": "xtrabackup"} 2025-12-05T13:34:04.491Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", "user": "xtrabackup"} 2025-12-05T13:34:04.518Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-12-05T13:34:04.545Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", "user": "xtrabackup"} 2025-12-05T13:34:04.557Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", "user": "xtrabackup"} 2025-12-05T13:34:04.564Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", "user": "replication"} 2025-12-05T13:34:04.579Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", "user": "replication"} 2025-12-05T13:34:04.606Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", "secret": "some-name-mysql-init", "user": "replication"} 2025-12-05T13:34:04.643Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", 
"controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", "user": "replication"} 2025-12-05T13:34:04.657Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", "user": "replication"} 2025-12-05T13:34:04.657Z INFO PXC pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", "last-applied-secret": "ec5ac83f77928a209a8e6815bcad9ecb1b05824aa0d3b2f290000ad4fbf9a47b"} 2025-12-05T13:34:04.659Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "540093ce-e979-4f9c-b618-0f6fbcf49ec4", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-05T13:35:26.299Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "a5619442-35ac-4177-bbca-333f8e25b00a", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.137.104.78:33062: i/o timeout"} 2025-12-05T13:36:45.738Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "28fde431-1347-410a-8382-c5269a0c316a", "user": "monitor"} 2025-12-05T13:36:46.647Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "28fde431-1347-410a-8382-c5269a0c316a", "user": "monitor"} 2025-12-05T13:36:48.522Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "4a21c340-955f-49f8-82b3-a77e8134fdc3", "user": "monitor"} 2025-12-05T13:36:48.537Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "4a21c340-955f-49f8-82b3-a77e8134fdc3", "user": "monitor"} 2025-12-05T13:36:48.561Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": 
"pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "4a21c340-955f-49f8-82b3-a77e8134fdc3", "secret": "some-name-mysql-init", "user": "monitor"} 2025-12-05T13:36:48.585Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "4a21c340-955f-49f8-82b3-a77e8134fdc3", "user": "monitor"} 2025-12-05T13:36:50.736Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "ea29c273-67ae-4f23-833f-37321711025a", "user": "monitor"} 2025-12-05T13:36:56.282Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "03b12e72-dbe1-4d82-993f-ec7fd3425aa0", "user": "monitor"} 2025-12-05T13:37:02.392Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "b27bdd30-7a2b-4752-a970-59ace3e6fbb2", "user": "monitor"} 2025-12-05T13:37:08.003Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "a80cbbe6-ba6c-48fa-80f1-a35eb775a958", "user": "monitor"} 2025-12-05T13:37:13.608Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-28609"}, "namespace": "users-28609", "name": "some-name", "reconcileID": "a8971ed2-6574-4c32-b90f-83601560d5c0", "user": "monitor"} ... // 22 identical fields ... // 2 identical fields ... // 2 identical fields ... // 2 identical fields ... // 3 identical elements ... // 3 identical fields ... // 3 identical fields ... // 3 identical fields ... // 4 identical fields ... // 5 identical elements ... // 5 identical fields ... // 5 identical fields ... // 5 identical fields ... // 6 identical fields ... // 6 identical fields ... // 7 identical fields - "8dabca2723fabb6ac6faf31c1e85406c93850e24d53c55bc567474bffadec216", + "8dabca2723fabb6ac6faf31c1e85406c93850e24d53c55bc567474bffadec216", ... // 8 identical fields - "9e89dc8b254dfebd0af02bc9edc244e8d08cc5789e09ef2a8eb6b8d05713848", ... // 9 identical fields ... 
// 9 identical fields - "a2a4640afab8db39535e5b142a78df185b4129cbe3a31e6c4524281285f66748", AccessModes: nil, ActiveDeadlineSeconds: nil, Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}}, Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}}, Annotations: map[string]string{ - Annotations: map[string]string{ + Annotations: map[string]string{ + APIVersion: "", - APIVersion: "apps/v1", - APIVersion: "apps/v1", - APIVersion: "v1", Args: {"mysqld"}, Args: {"proxysql", "-f", "-c", "/etc/proxysql/proxysql.cnf", ...}, - Args: []string{"logrotate"}, AutomountServiceAccountToken: nil, + AvailableReplicas: 0, - AvailableReplicas: 2, - AvailableReplicas: 3, AWSElasticBlockStore: nil, AzureFile: nil, "b", - "b5416c150db08307f703f5722953a4afe339dfb2dc78b81f15af58338d9bb63e", + "b5416c150db08307f703f5722953a4afe339dfb2dc78b81f15af58338d9bb63e", Capacity: nil, - CollisionCount: &0, + CollisionCount: nil, Conditions: nil, ConfigMapKeyRef: nil, ConfigMap: &v1.ConfigMapVolumeSource{ ContainerPort: 3306, ContainerPort: 33060, ContainerPort: 33062, ContainerPort: 4444, ContainerPort: 4567, ContainerPort: 4568, ContainerPort: 6032, ContainerPort: 6070, Containers: []v1.Container{ + CreationTimestamp: v1.Time{}, - CreationTimestamp: v1.Time{Time: s"2025-12-05 13:14:43 +0000 UTC"}, + CurrentReplicas: 0, - CurrentReplicas: 2, - CurrentReplicas: 3, + CurrentRevision: "", - CurrentRevision: "some-name-proxysql-5d965498b6", - CurrentRevision: "some-name-proxysql-6c45bc4644", - CurrentRevision: "some-name-proxysql-78d496f47f", - CurrentRevision: "some-name-proxysql-7d74bf9fc6", - CurrentRevision: "some-name-proxysql-857969bb87", - CurrentRevision: "some-name-proxysql-ccd6f7c9b", - CurrentRevision: "some-name-pxc-6bb967bb84", - CurrentRevision: "some-name-pxc-7749f76bbd", - CurrentRevision: "some-name-pxc-7dc8745949", - CurrentRevision: "some-name-pxc-8bfffb68d", DataSource: nil, DataSourceRef: nil, + "dd4d0cf46948def1d20931265f1f0f2b55997c4beaf142bb5f5042be3005b96", - "dd4d0cf46948def1d20931265f1f0f2b55997c4beaf142bb5f5042be3005b962", + "dd4d0cf46948def1d20931265f1f0f2b55997c4beaf142bb5f5042be3005b962", - DefaultMode: &420, - DefaultMode: &420, + DefaultMode: nil, + DefaultMode: nil, DeletionGracePeriodSeconds: nil, DeletionGracePeriodSeconds: nil, DeletionTimestamp: nil, + DeprecatedServiceAccount: "", - DeprecatedServiceAccount: "default", + DNSPolicy: "", - DNSPolicy: "ClusterFirst", + "ec5ac83f77928a209a8e6815bcad9ecb1b05824aa0d3b2f290000ad4fbf9a47", - "ec5ac83f77928a209a8e6815bcad9ecb1b05824aa0d3b2f290000ad4fbf9a47b", + "ec5ac83f77928a209a8e6815bcad9ecb1b05824aa0d3b2f290000ad4fbf9a47b", EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-proxysql"}, Optional: &true}}}, EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-pxc"}, Optional: &true}}}, EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-log-collector"}, Optional: &true}}}, - EnvFrom: 
[]v1.EnvFromSource{{SecretRef: s"&SecretEnvSource{LocalObjectRefe"...}}, Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "OPERATOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "proxyadmin"}}}, ...}, Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, ...}, Env: []v1.EnvVar{ - Env: []v1.EnvVar{ EphemeralContainers: nil, FailureThreshold: 3, FC: nil, FieldPath: "metadata.name", FieldPath: "metadata.namespace", FieldRef: &v1.ObjectFieldSelector{ - FieldsType: "FieldsV1", - FieldsType: "FieldsV1", - FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`..., - FieldsV1: s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`..., Finalizers: nil, Finalizers: nil, + Generation: 0, - Generation: 1, - Generation: 2, - Generation: 3, - Generation: 4, - Generation: 5, - Generation: 6, - Generation: 7, - Generation: 8, github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 GitRepo: nil, /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:857 HostAliases: nil, HostIP: "", HostPort: 0, - Image: "perconalab/fluentbit:main-logcollector", - Image: "perconalab/fluentbit:main-logcollector", ImagePullPolicy: "Always", - ImagePullPolicy: "Always", InitContainers: []v1.Container{ InitialDelaySeconds: 300, ISCSI: nil, Items: nil, Items: nil, "kubectl.kubernetes.io/default-container": "proxysql", "kubectl.kubernetes.io/default-container": "pxc", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Labels: nil, + "last-applied-secret": "9e89dc8b254dfebd0af02bc9edc244e8d08cc5789e09ef2a8eb6b8d057138482", + "last-applied-secret": "a2a4640afab8db39535e5b142a78df185b4129cbe3a31e6c4524281285f66748", "last-applied-secret": strings.Join({ Lifecycle: nil, LivenessProbe: &v1.Probe{ LocalObjectReference: {Name: "auto-some-name-pxc"}, LocalObjectReference: {Name: "some-name-pxc"}, ManagedFields: nil, + ManagedFields: nil, - ManagedFields: []v1.ManagedFieldsEntry{ - Manager: "kube-controller-manager", - Manager: "percona-xtradb-cluster-operator", MinReadySeconds: 0, Name: "auto-config", {Name: "bin", VolumeSource: {EmptyDir: &{}}}, {Name: "CLUSTER_HASH", Value: "1797040"}, Name: "config", - {Name: "IS_LOGCOLLECTOR", Value: "yes"}, Name: "ist", {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"}, - {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"}, - Name: "logrotate", - Name: "logs", {Name: "MONITOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "monitor"}}}, - {Name: "MONITOR_PASSWORD", ValueFrom: 
s"&EnvVarSource{FieldRef:nil,Resou"...}, Name: "mysql", Name: "mysql-admin", Name: "mysql-init-file", Name: "mysql-users-secret-file", Name: "mysqlx", {Name: "OPERATOR_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, Name: "POD_NAME", Name: "POD_NAMESPASE", - {Name: "POD_NAMESPASE", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, - {Name: "POD_NAME", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, Name: "proxyadm", - {Name: "SERVICE_TYPE", Value: "mysql"}, Namespace: "users-28609", Name: "ssl", Name: "ssl-internal", Name: "sst", Name: "stats", {Name: "tmp", VolumeSource: {EmptyDir: &{}}}, Name: "vault-keyring-secret", Name: "write-set", {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, NFS: nil, NodeName: "", NodeSelector: nil, ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "last-applied-secret": "a2a4640afab8db39535e5b142a78df185b4129cbe3a31e6c4524281285f66748", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}}, ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}}, ObjectMeta: {Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "pxc", "last-applied-secret": "1a1ee814a764af973da75804b6d460e2c4c5723ab4436ab4f9e48e078bdc880b", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}}, ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, ObjectMeta: {Name: "proxydata", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, ObjectMeta: v1.ObjectMeta{ ObjectMeta: v1.ObjectMeta{ + ObservedGeneration: 0, - ObservedGeneration: 1, - ObservedGeneration: 2, - ObservedGeneration: 3, - ObservedGeneration: 4, - ObservedGeneration: 5, - ObservedGeneration: 6, - ObservedGeneration: 7, - ObservedGeneration: 8, - Operation: "Update", - Operation: "Update", Optional: &false, Optional: &true, Optional: &true, Ordinals: nil, OS: nil, Overhead: nil, OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name", UID: "57647444-0618-43e1-9919-8ffc93e1c419", ...}}, OwnerReferences: nil, 
"percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMWExZWU4MTRhNzY0YWY5NzNkYTc1ODA0YjZkNDYwZTJjNGM1NzIzYWI0NDM2YWI0ZjllNDhlMDc4YmRjODgwYiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOGRhYmNhMjcyM2ZhYmI2YWM2ZmFmMzFjMWU4NTQwNmM5Mzg1MGUyNGQ1M2M1NWJjNTY3NDc0YmZmYWRlYzIxNiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOGRhYmNhMjcyM2ZhYmI2YWM2ZmFmMzFjMWU4NTQwNmM5Mzg1MGUyNGQ1M2M1NWJjNTY3NDc0YmZmYWRlYzIxNiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYjU0MTZjMTUwZGIwODMwN2Y3MDNmNTcyMjk1M2E0YWZlMzM5ZGZiMmRjNzhiODFmMTVhZjU4MzM4ZDliYjYzZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYjU0MTZjMTUwZGIwODMwN2Y3MDNmNTcyMjk1M2E0YWZlMzM5ZGZiMmRjNzhiODFmMTVhZjU4MzM4ZDliYjYzZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYTJhNDY0MGFmYWI4ZGIzOTUzNWU1YjE0MmE3OGRmMTg1YjQxMjljYmUzYTMxZTZjNDUyNDI4MTI4NWY2Njc0OCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZGQ0ZDBjZjQ2OTQ4ZGVmMWQyMDkzMTI2NWYxZjBmMmI1NTk5N2M0YmVhZjE0MmJiNWY1MDQyYmUzMDA1Yjk2MiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZGQ0ZDBjZjQ2OTQ4ZGVmMWQyMDkzMTI2NWYxZjBmMmI1NTk5N2M0YmVhZjE0MmJiNWY1MDQyYmUzMDA1Yjk2MiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZWM1YWM4M2Y3NzkyOGEyMDlhOGU2ODE1YmNhZDllY2IxYjA1ODI0YWEwZDNiMmYyOTAwMDBhZDRmYmY5YTQ3YiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZWM1YWM4M2Y3NzkyOGEyMDlhOGU2ODE1YmNhZDllY2IxYjA1ODI0YWEwZDNiMmYyOTAwMDBhZDRmYmY5YTQ3YiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMWExZWU4MTRhNzY0YWY5NzNkYTc1ODA0YjZkNDYwZTJjNGM1NzIzYWI0NDM2YWI0ZjllNDhlMDc4YmRjODgwYiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMWExZWU4MTRhNzY0YWY5NzNkYTc1ODA0YjZkNDYwZTJjNGM1NzIzYWI0NDM2YWI0ZjllNDhlMDc4YmRjODgwYiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMWExZWU4MTRhNzY0YWY5NzNkYTc1ODA0YjZkNDYwZTJjNGM1NzIzYWI0NDM2YWI0ZjllNDhlMDc4YmRjODgwYiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMjg5LTA0NzU2NGY5IiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiO
iJsb2dzIiwiaW1hZ2UiOiJwZXJjb25hbGFiL2ZsdWVudGJpdDptYWluLWxvZ2NvbGxlY3RvciIsImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWxvZy1jb2xsZWN0b3IiLCJvcHRpb25hbCI6dHJ1ZX19XSwiZW52IjpbeyJuYW1lIjoiTE9HX0RBVEFfRElSIiwidmFsdWUiOiIvdmFyL2xpYi9teXNxbCJ9LHsibmFtZSI6IlBPRF9OQU1FU1BBU0UiLCJ2YWx1ZUZyb20iOnsiZmllbGRSZWYiOnsiZmllbGRQYXRoIjoibWV0YWRhdGEubmFtZXNwYWNlIn19fSx7Im5hbWUiOiJQT0RfTkFNRSIsInZhbHVlRnJvbSI6eyJmaWVsZFJlZiI6eyJmaWVsZFBhdGgiOiJtZXRhZGF0YS5uYW1lIn19fV0sInJlc291cmNlcyI6e30sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImRhdGFkaXIiLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoibG9ncm90YXRlIiwiaW1hZ2UiOiJwZXJjb25hbGFiL2ZsdWVudGJpdDptYWluLWxvZ2NvbGxlY3RvciIsImFyZ3MiOlsibG9ncm90YXRlIl0sImVudiI6W3sibmFtZSI6IlNFUlZJQ0VfVFlQRSIsInZhbHVlIjoibXlzcWwifSx7Im5hbWUiOiJNT05JVE9SX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoibW9uaXRvciJ9fX1dLCJyZXNvdXJjZXMiOnt9LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJkYXRhZGlyIiwibW91bnRQYXRoIjoiL3Zhci9saWIvbXlzcWwifV0sImltYWdlUHVsbFBvbGljeSI6IkFsd2F5cyJ9LHsibmFtZSI6InB4YyIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOm1haW4tcHhjOC4wIiwiY29tbWFuZCI6WyIvdmFyL2xpYi9teXNxbC9weGMtZW50cnlwb2ludC5zaCJdLCJhcmdzIjpbIm15c3FsZCJdLCJwb3J0cyI6W3sibmFtZSI6Im15c3FsIiwiY29udGFpbmVyUG9ydCI6MzMwNn0seyJuYW1lIjoic3N0IiwiY29udGFpbmVyUG9ydCI6NDQ0NH0seyJuYW1lIjoid3JpdGUtc2V0IiwiY29udGFpbmVyUG9ydCI6NDU2N30seyJuYW1lIjoiaXN0IiwiY29udGFpbmVyUG9ydCI6NDU2OH0seyJuYW1lIjoibXlzcWwtYWRtaW4iLCJjb250YWluZXJQb3J0IjozMzA2Mn0seyJuYW1lIjoibXlzcWx4IiwiY29udGFpbmVyUG9ydCI6MzMwNjB9XSwiZW52RnJv"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMWExZWU4MTRhNzY0YWY5NzNkYTc1ODA0YjZkNDYwZTJjNGM1NzIzYWI0NDM2YWI0ZjllNDhlMDc4YmRjODgwYiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5
hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMjg5LTA0NzU2NGY5IiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXB4YzguMCIsImNvbW1hbmQiOlsiL3Zhci9saWIvbXlzcWwvcHhjLWVudHJ5cG9pbnQuc2giXSwiYXJncyI6WyJteXNxbGQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InNzdCIsImNvbnRhaW5lclBvcnQiOjQ0NDR9LHsibmFtZSI6IndyaXRlLXNldCIsImNvbnRhaW5lclBvcnQiOjQ1Njd9LHsibmFtZSI6ImlzdCIsImNvbnRhaW5lclBvcnQiOjQ1Njh9LHsibmFtZSI6Im15c3FsLWFkbWluIiwiY29udGFpbmVyUG9ydCI6MzMwNjJ9LHsibmFtZSI6Im15c3FseCIsImNvbnRhaW5lclBvcnQiOjMzMDYwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXB4YyIsIm9wdGlvbmFsIjp0cnVlfX1dLCJlbnYiOlt7Im5hbWUiOiJQWENfU0VSVklDRSIsInZhbHVlIjoic29tZS1uYW1lLXB4Yy11bnJlYWR5In0seyJuYW1lIjoiTU9OSVRPUl9IT1NUIiwidmFsdWUiOiIlIn0seyJuYW1lIjoiTVlTUUxfUk9PVF9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6InJvb3QifX19LHsibmFtZSI6IlhUUkFCQUNLVVBfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJ4dHJhYmFja3VwIn19fSx7Im5hbWUiOiJNT05JVE9SX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoibW9uaXRvciJ9fX0seyJuYW1lIjoiQ0xVU1RFUl9IQVNIIiwidmFsdWUiOiIxNzk3MDQwIn0seyJuYW1lIjoiT1BFUkFUT1JfQURNSU5fUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiTElWRU5FU1NfQ0hFQ0tfVElNRU9VVCIsInZhbHVlIjoiNSJ9LHsibmFtZSI6IlJFQURJTkVTU19DSEVDS19USU1FT1VUIiwidmFsdWUiOiIxNSJ9LHsibmFtZSI6IkRFRkFVTFRfQVVUSEVOVElDQVRJT05fUExVR0lOIiwidmFsdWUiOiJjYWNoaW5nX3NoYTJfcGFzc3dvcmQifSx7Im5hbWUiOiJNWVNR"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOWU4OWRjOGIyNTRkZmViZDBhZjAyYmM5ZWRjMjQ0ZThkMDhjYzU3ODllMDllZjJhOGViNmI4ZDA1NzEzODQ4MiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOWU4OWRjOGIyNTRkZmViZDBhZjAyYmM5ZWRjMjQ0ZThkMDhjYzU3ODllMDllZjJhOGViNmI4ZDA1NzEzODQ4MiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZGQ0ZDBjZjQ2OTQ4ZGVmMWQyMDkzMTI2NWYxZjBmMmI1NTk5N2M0YmVhZjE0MmJiNWY1MDQyYmUzMDA1Yjk2MiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZGQ0ZDBjZjQ2OTQ4ZGVmMWQyMDkzMTI2NWYxZjBmMmI1NTk5N2M0YmVhZjE0MmJiNWY1MDQyYmUzMDA1Yjk2MiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZWM1YWM4M2Y3NzkyOGEyMDlhOGU2ODE1YmNhZDllY2IxYjA1ODI0YWEwZDNiMmYyOTAwMDBhZDRmYmY5YTQ3YiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNv"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYTJhNDY0MGFmYWI4ZGIzOTUzNWU1YjE0MmE3OGRmMTg1YjQxMjljYmUzYTMxZTZjNDUyNDI4MTI4NWY2Njc0OCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMw"..., "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc", + PeriodSeconds: 0, - PeriodSeconds: 10, + PersistentVolumeClaimRetentionPolicy: nil, - PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", + Phase: "", - Phase: "Pending", + PodManagementPolicy: "", - PodManagementPolicy: "OrderedReady", Ports: nil, Ports: []v1.ContainerPort{ PreemptionPolicy: nil, ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}}, + Protocol: "", - Protocol: "TCP", Quobyte: nil, ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...}, + ReadyReplicas: 0, - ReadyReplicas: 2, - ReadyReplicas: 3, + Replicas: 0, Replicas: &2, - Replicas: 2, - Replicas: &2, + Replicas: &2, Replicas: &3, - Replicas: 3, - Replicas: &3, + Replicas: &3, ResizePolicy: nil, ResourceFieldRef: nil, Resources: {}, Resources: {Limits: {s"cpu": {i: {...}, s: "1", Format: "DecimalSI"}, s"memory": {i: {...}, s: "2G", Format: "DecimalSI"}}, Requests: {s"cpu": {i: {...}, s: "100m", Format: "DecimalSI"}, s"memory": {i: {...}, Format: "DecimalSI"}}}, + ResourceVersion: "", - ResourceVersion: 
"1764940521026351023", - ResourceVersion: "1764940702054127019", - ResourceVersion: "1764940824274095023", - ResourceVersion: "1764940855211263023", - ResourceVersion: "1764940877932415023", - ResourceVersion: "1764941028570079019", - ResourceVersion: "1764941058537903023", - ResourceVersion: "1764941145702383023", - ResourceVersion: "1764941191768927023", - ResourceVersion: "1764941314318399019", - ResourceVersion: "1764941343969279023", - ResourceVersion: "1764941463695231019", - ResourceVersion: "1764941640945823019", + RestartPolicy: "", - RestartPolicy: "Always", - RevisionHistoryLimit: &10, + RevisionHistoryLimit: nil, + SchedulerName: "", - SchedulerName: "default-scheduler", SecretName: "internal-some-name", SecretName: "some-name-mysql-init", SecretName: "some-name-ssl", SecretName: "some-name-ssl-internal", SecretName: "some-name-vault", Secret: &v1.SecretVolumeSource{ SecurityContext: nil, Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, SelfLink: "", ServiceAccountName: "default", ServiceName: "some-name-proxysql-unready", ServiceName: "some-name-pxc", SetHostnameAsFQDN: nil, Spec: v1.PersistentVolumeClaimSpec{ Spec: v1.PodSpec{ Spec: v1.StatefulSetSpec{ StartupProbe: nil, Status: v1.PersistentVolumeClaimStatus{ Status: v1.StatefulSetStatus{ StorageClassName: nil, Subdomain: "", - Subresource: "status", SuccessThreshold: 1, Template: v1.PodTemplateSpec{ TerminationGracePeriodSeconds: &30, TerminationGracePeriodSeconds: &600, TerminationGracePeriodSeconds: nil, + TerminationMessagePath: "", - TerminationMessagePath: "/dev/termination-log", + TerminationMessagePolicy: "", - TerminationMessagePolicy: "File", TimeoutSeconds: 5, - Time: s"2025-12-05 13:14:43 +0000 UTC", - Time: s"2025-12-05 13:15:21 +0000 UTC", - Time: s"2025-12-05 13:18:22 +0000 UTC", - Time: s"2025-12-05 13:20:05 +0000 UTC", - Time: s"2025-12-05 13:20:24 +0000 UTC", - Time: s"2025-12-05 13:20:31 +0000 UTC", - Time: s"2025-12-05 13:20:55 +0000 UTC", - Time: s"2025-12-05 13:21:16 +0000 UTC", - Time: s"2025-12-05 13:21:17 +0000 UTC", - Time: s"2025-12-05 13:21:18 +0000 UTC", - Time: s"2025-12-05 13:23:48 +0000 UTC", - Time: s"2025-12-05 13:23:58 +0000 UTC", - Time: s"2025-12-05 13:24:18 +0000 UTC", - Time: s"2025-12-05 13:25:31 +0000 UTC", - Time: s"2025-12-05 13:25:45 +0000 UTC", - Time: s"2025-12-05 13:26:13 +0000 UTC", - Time: s"2025-12-05 13:26:31 +0000 UTC", - Time: s"2025-12-05 13:28:34 +0000 UTC", - Time: s"2025-12-05 13:28:43 +0000 UTC", - Time: s"2025-12-05 13:29:03 +0000 UTC", - Time: s"2025-12-05 13:30:48 +0000 UTC", - Time: s"2025-12-05 13:31:03 +0000 UTC", - Time: s"2025-12-05 13:31:07 +0000 UTC", - Time: s"2025-12-05 13:34:00 +0000 UTC", Tolerations: nil, - TopologySpreadConstraints: nil, + TopologySpreadConstraints: []v1.TopologySpreadConstraint{}, TypeMeta: {}, TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"}, + UID: "", - UID: "7386fb33-4d79-4bbc-9e1b-61c7f9d25fa3", - UID: "8bf95a5a-768d-499b-9480-eb28d3e6caf7", + UpdatedReplicas: 0, - UpdatedReplicas: 1, - UpdatedReplicas: 2, - UpdatedReplicas: 3, + UpdateRevision: "", - UpdateRevision: "some-name-proxysql-5d965498b6", - 
UpdateRevision: "some-name-proxysql-6c45bc4644", - UpdateRevision: "some-name-proxysql-78d496f47f", - UpdateRevision: "some-name-proxysql-7d74bf9fc6", - UpdateRevision: "some-name-proxysql-857969bb87", - UpdateRevision: "some-name-proxysql-ccd6f7c9b", - UpdateRevision: "some-name-pxc-5d4465cf99", - UpdateRevision: "some-name-pxc-6bb967bb84", - UpdateRevision: "some-name-pxc-7749f76bbd", - UpdateRevision: "some-name-pxc-7dc8745949", - UpdateRevision: "some-name-pxc-8bfffb68d", UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}}, &v1.StatefulSet{ Value: "", ValueFrom: &v1.EnvVarSource{ VolumeAttributesClassName: nil, VolumeClaimTemplates: []v1.PersistentVolumeClaim{ VolumeDevices: nil, - VolumeMode: &"Filesystem", + VolumeMode: nil, VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...}, - VolumeMounts: []v1.VolumeMount{{Name: "datadir", MountPath: "/var/lib/mysql"}}, VolumeName: "", VolumeSource: v1.VolumeSource{ Volumes: []v1.Volume{ VsphereVolume: nil, WorkingDir: "", + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-28609 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.SBGngSXI0O ++ mktemp + local LAST_ERR=/tmp/tmp.6DUPnNGyZu + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SBGngSXI0O perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-28609 namespace + cat /tmp/tmp.6DUPnNGyZu + rm /tmp/tmp.SBGngSXI0O /tmp/tmp.6DUPnNGyZu + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.TbKUGDqAVU ++ mktemp + local LAST_ERR=/tmp/tmp.f35BVWHVc1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TbKUGDqAVU No resources found + cat /tmp/tmp.f35BVWHVc1 + rm /tmp/tmp.TbKUGDqAVU /tmp/tmp.f35BVWHVc1 + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.I1S7WNfZCE ++ mktemp + local LAST_ERR=/tmp/tmp.WZgN5Jh2l0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.I1S7WNfZCE No resources found + cat /tmp/tmp.WZgN5Jh2l0 + rm /tmp/tmp.I1S7WNfZCE /tmp/tmp.WZgN5Jh2l0 + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.ww10Ufl0x4 ++ mktemp + local LAST_ERR=/tmp/tmp.rekgbKCEOE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ww10Ufl0x4 validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.rekgbKCEOE + rm /tmp/tmp.ww10Ufl0x4 /tmp/tmp.rekgbKCEOE + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml 
+ : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-28609 + rm -rf /tmp/tmp.eAHkEcbyFX + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.0SEUguJv5D + local LAST_OUT=/tmp/tmp.XNTeuNLdH1 ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.tsFJkF09nR + local exit_status=0 + local LAST_ERR=/tmp/tmp.w3VBgE1fHg + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-28609 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator
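-----------------------------------------------------------------------------------
editor's note: reading the statefulset diff above
-----------------------------------------------------------------------------------
The large +/- block earlier in this log is a Go cmp.Diff-style dump printed while comparing a StatefulSet rendered from the PerconaXtraDBCluster spec against the object read back from the API server. Most of the churn is server-managed or defaulted fields (ResourceVersion, UID, ObservedGeneration, the Status counters, RestartPolicy, SchedulerName, TerminationMessagePath and similar), which one side has populated and the other leaves zero-valued; the substantive differences are the percona.com/last-config-hash and last-applied-secret annotation values, which track the applied spec and secrets across generations. A rough way to reproduce such a comparison by hand, with the noisy fields stripped; the jq filter and the /tmp/expected.json path are illustrative, not part of the suite:

    kubectl -n users-28609 get sts some-name-pxc -o json \
        | jq 'del(.metadata.resourceVersion, .metadata.uid, .metadata.generation, .metadata.managedFields, .metadata.creationTimestamp, .status)' \
        > /tmp/live.json
    diff -u /tmp/expected.json /tmp/live.json

The closing force-deletes (--grace-period=0 --force=true) skip graceful pod termination; that is acceptable here only because the namespaces are disposable test fixtures.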