Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/logs/users-8-0.log
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
+ create_infra users-13701
+ local ns=users-13701
+ '[' -n pxc-operator ']'
+ kubectl get pxc --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch pxc -n users-21773 some-name --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.8zjn89fpws
++ mktemp
+ local LAST_ERR=/tmp/tmp.lpbGibt6aQ
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.8zjn89fpws
perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-21773 namespace
+ cat /tmp/tmp.lpbGibt6aQ
+ rm /tmp/tmp.8zjn89fpws /tmp/tmp.lpbGibt6aQ
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.xQXyi05cj8
++ mktemp
+ local LAST_ERR=/tmp/tmp.ecycD8xoFO
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.xQXyi05cj8
No resources found
+ cat /tmp/tmp.ecycD8xoFO
+ rm /tmp/tmp.xQXyi05cj8 /tmp/tmp.ecycD8xoFO
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.AVcf88Nw5t
++ mktemp
+ local LAST_ERR=/tmp/tmp.sJGeB5VCUS
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.AVcf88Nw5t
No resources found
+ cat /tmp/tmp.sJGeB5VCUS
+ rm /tmp/tmp.AVcf88Nw5t /tmp/tmp.sJGeB5VCUS
+ return 0
+ create_namespace pxc-operator
+ local namespace=pxc-operator
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ sed s/NAMESPACE//
++ awk '-F ' '{print $2}'
++ tail -n1
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep validate-auth
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get crd
++ grep chaos-mesh.org
++ awk '{print $1}'
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ awk '{print $1}'
++ grep chaos-mesh
++ kubectl get clusterrole
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces pxc-operator'
+ awk '{print$1}'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace pxc-operator
+ xargs kubectl delete ns
+ grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
+ kubectl_bin get ns
++ mktemp
++ mktemp
+ local LAST_OUT=/tmp/tmp.EkHzfUXVTq
++ mktemp
+ local LAST_OUT=/tmp/tmp.o2H6tgPV20
+ local LAST_ERR=/tmp/tmp.osdyuZtvYz
+ local exit_status=0
++ mktemp
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.C4RZskocVs
+ local exit_status=0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.EkHzfUXVTq
+ cat /tmp/tmp.osdyuZtvYz
+ rm /tmp/tmp.EkHzfUXVTq /tmp/tmp.osdyuZtvYz
+ return 0
namespace "users-21773" deleted
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.o2H6tgPV20
namespace "pxc-operator" deleted
+ cat /tmp/tmp.C4RZskocVs
+ rm /tmp/tmp.o2H6tgPV20 /tmp/tmp.C4RZskocVs
+ return 0
+ wait_for_delete namespace/pxc-operator
+ local res=namespace/pxc-operator
+ echo -n 'waiting for namespace/pxc-operator to be deleted'
waiting for namespace/pxc-operator to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "pxc-operator" not found
+ desc 'create namespace pxc-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.Y4aAmE8R1K
++ mktemp
+ local LAST_ERR=/tmp/tmp.O9e96msNsV
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.Y4aAmE8R1K
namespace/pxc-operator created
+ cat /tmp/tmp.O9e96msNsV
+ rm /tmp/tmp.Y4aAmE8R1K /tmp/tmp.O9e96msNsV
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.PgmeTQnNSP
+++ mktemp
++ local LAST_ERR=/tmp/tmp.31cYJxTg7H
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.PgmeTQnNSP
++ cat /tmp/tmp.31cYJxTg7H
++ rm /tmp/tmp.PgmeTQnNSP /tmp/tmp.31cYJxTg7H
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2234-07fcc419-17-cluster7 --namespace=pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.3IrbmtnMEQ
++ mktemp
+ local LAST_ERR=/tmp/tmp.rKdlahes8l
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2234-07fcc419-17-cluster7 --namespace=pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.3IrbmtnMEQ
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2234-07fcc419-17-cluster7" modified.
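
Nearly every command in this log runs through the suite's kubectl_bin wrapper, which is what produces the recurring mktemp/LAST_OUT/LAST_ERR/seq 0 2 pattern in the trace. A minimal sketch of what the trace implies the wrapper does; the variable names and the three-attempt loop are taken from the trace, while the redirections and the exact retry condition are assumptions (the trace suggests the real helper also tests an extra flag before sleeping):

# Run kubectl with output captured to temp files, retrying up to three times.
kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"   # assumed redirection; xtrace does not show it
        exit_status=$?
        set -e
        if [ "$exit_status" != 0 ]; then
            sleep 0                               # the trace shows 'sleep 0' between attempts
            continue
        fi
        break
    done
    cat "$LAST_OUT"        # replay captured stdout into the log
    cat "$LAST_ERR" >&2    # replay captured stderr (stream choice is an assumption)
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}
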
+ cat /tmp/tmp.rKdlahes8l + rm /tmp/tmp.3IrbmtnMEQ /tmp/tmp.rKdlahes8l + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.9ceQEncL0N ++ mktemp + local LAST_ERR=/tmp/tmp.eLrq0SsD4e + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.9ceQEncL0N customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.eLrq0SsD4e + rm /tmp/tmp.9ceQEncL0N /tmp/tmp.eLrq0SsD4e + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.BvikHbuC0h ++ mktemp + local LAST_ERR=/tmp/tmp.tkf99zSIwg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BvikHbuC0h clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.tkf99zSIwg + rm /tmp/tmp.BvikHbuC0h /tmp/tmp.tkf99zSIwg + return 0 + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2234-07fcc419^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/deploy/cw-operator.yaml + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - ++ mktemp + local LAST_OUT=/tmp/tmp.ZlzGoYz3xV ++ mktemp + local LAST_ERR=/tmp/tmp.KvCXj6y2Yv + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ZlzGoYz3xV deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.KvCXj6y2Yv + rm /tmp/tmp.ZlzGoYz3xV /tmp/tmp.KvCXj6y2Yv + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.vAEauvAFF7 ++ mktemp + local LAST_ERR=/tmp/tmp.r1ggC5NMTr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.vAEauvAFF7 pod/percona-xtradb-cluster-operator-7f686bcf7c-9v5w7 condition met + cat /tmp/tmp.r1ggC5NMTr + rm /tmp/tmp.vAEauvAFF7 /tmp/tmp.r1ggC5NMTr + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.Qch9sdhVW4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.gcMskjfuyy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Qch9sdhVW4 ++ cat /tmp/tmp.gcMskjfuyy ++ rm /tmp/tmp.Qch9sdhVW4 /tmp/tmp.gcMskjfuyy ++ return 0 + wait_pod percona-xtradb-cluster-operator-7f686bcf7c-9v5w7 480 pxc-operator + local pod=percona-xtradb-cluster-operator-7f686bcf7c-9v5w7 + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-7f686bcf7c-9v5w7 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-7f686bcf7c-9v5w7 condition met waiting for pod/percona-xtradb-cluster-operator-7f686bcf7c-9v5w7 to become Ready.Ok + sleep 3 + create_namespace users-13701 + local namespace=users-13701 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ awk '{print $1}' ++ grep chaos-mesh.org + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + awk 
'{print$1}' + '[' -n '' ']' + xargs kubectl delete ns + desc 'cleaned up old namespaces users-13701' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-13701 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-13701 + grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' ++ mktemp + local LAST_OUT=/tmp/tmp.F9ISJ9laAs ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.x5AEPJNwHK + local exit_status=0 + local LAST_OUT=/tmp/tmp.UZgs1on7s1 ++ mktemp ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-13701 + local LAST_ERR=/tmp/tmp.1CZ9TiW7yT + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-13701 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.UZgs1on7s1 + cat /tmp/tmp.1CZ9TiW7yT + rm /tmp/tmp.UZgs1on7s1 /tmp/tmp.1CZ9TiW7yT + return 0 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-13701 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.F9ISJ9laAs + cat /tmp/tmp.x5AEPJNwHK Error from server (NotFound): namespaces "users-13701" not found + rm /tmp/tmp.F9ISJ9laAs /tmp/tmp.x5AEPJNwHK + return 1 + : + wait_for_delete namespace/users-13701 + local res=namespace/users-13701 + echo -n 'waiting for namespace/users-13701 to be deleted' waiting for namespace/users-13701 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "users-13701" not found + desc 'create namespace users-13701' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-13701 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-13701 ++ mktemp + local LAST_OUT=/tmp/tmp.BxyA6bvsiW ++ mktemp + local LAST_ERR=/tmp/tmp.j1T6yAaiqB + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-13701 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BxyA6bvsiW namespace/users-13701 created + cat /tmp/tmp.j1T6yAaiqB + rm /tmp/tmp.BxyA6bvsiW /tmp/tmp.j1T6yAaiqB + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.nxHaCHXhbl +++ mktemp ++ local LAST_ERR=/tmp/tmp.p6CRfCFuli ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nxHaCHXhbl ++ cat /tmp/tmp.p6CRfCFuli ++ rm /tmp/tmp.nxHaCHXhbl /tmp/tmp.p6CRfCFuli ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2234-07fcc419-17-cluster7 --namespace=users-13701 ++ mktemp + local LAST_OUT=/tmp/tmp.lxZEDdniYw ++ mktemp + local LAST_ERR=/tmp/tmp.sGfxnzlqpw + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2234-07fcc419-17-cluster7 --namespace=users-13701 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lxZEDdniYw Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2234-07fcc419-17-cluster7" modified. 
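
The namespace churn traced above follows one pattern: scrub every non-system namespace, recreate the test namespace, then point the kubeconfig context at it. A condensed sketch of those steps, with the exclusion regex and context name copied verbatim from the trace (the real helpers run the same commands through the kubectl_bin retry wrapper sketched earlier, partly in parallel):

# 1. Delete all namespaces except system and infrastructure ones.
kubectl get ns \
    | grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' \
    | awk '{print$1}' \
    | xargs kubectl delete ns

# 2. Recreate the test namespace and make it the default for later commands.
kubectl create namespace users-13701
kubectl config set-context \
    gke_cloud-dev-112233_us-central1-a_jen-pxc-2234-07fcc419-17-cluster7 \
    --namespace=users-13701
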
+ cat /tmp/tmp.sGfxnzlqpw + rm /tmp/tmp.lxZEDdniYw /tmp/tmp.sGfxnzlqpw + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.PY7jLxYhlG ++ mktemp + local LAST_ERR=/tmp/tmp.cQfN5lhQLj + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PY7jLxYhlG secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.cQfN5lhQLj + rm /tmp/tmp.PY7jLxYhlG /tmp/tmp.cQfN5lhQLj + return 0 + desc 'create PXC cluster with 1-password secret' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster with 1-password secret ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/some-name.yml '' '' /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/secrets_one_pass.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/secrets_one_pass.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/secrets_one_pass.yml ++ mktemp + local LAST_OUT=/tmp/tmp.KO18aF7YTd ++ mktemp + local LAST_ERR=/tmp/tmp.ZBPXb8ebQs + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/secrets_one_pass.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.KO18aF7YTd secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.ZBPXb8ebQs + rm /tmp/tmp.KO18aF7YTd /tmp/tmp.ZBPXb8ebQs + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/client.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/client.yml + local pvc_name= + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/client.yml '' + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/client.yml + local pvc_name= + kubectl_bin apply -f - + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: 
pxc.percona.com/v1#' ++ mktemp + local LAST_OUT=/tmp/tmp.wtwC8J1DAQ + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2234-07fcc419#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-13701~ + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' ++ mktemp + local LAST_ERR=/tmp/tmp.MwMyXC1XX4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.wtwC8J1DAQ deployment.apps/pxc-client created + cat /tmp/tmp.MwMyXC1XX4 + rm /tmp/tmp.wtwC8J1DAQ /tmp/tmp.MwMyXC1XX4 + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/some-name.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/some-name.yml + local pvc_name= + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/some-name.yml '' + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/some-name.yml + local pvc_name= + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' + local LAST_OUT=/tmp/tmp.U1zniPAdw7 + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2234-07fcc419#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-13701~ ++ mktemp + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_ERR=/tmp/tmp.2psgJe9RnL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.U1zniPAdw7 
perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.2psgJe9RnL + rm /tmp/tmp.U1zniPAdw7 /tmp/tmp.2psgJe9RnL + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.7n4jlm8Dyl ++++ mktemp +++ local LAST_ERR=/tmp/tmp.rEtWhXlo1R +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.7n4jlm8Dyl +++ cat /tmp/tmp.rEtWhXlo1R +++ rm /tmp/tmp.7n4jlm8Dyl /tmp/tmp.rEtWhXlo1R +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.3hiKnBP2Lu ++++ mktemp +++ local LAST_ERR=/tmp/tmp.oSH2aM2eyh +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.3hiKnBP2Lu +++ cat /tmp/tmp.oSH2aM2eyh +++ rm /tmp/tmp.3hiKnBP2Lu /tmp/tmp.oSH2aM2eyh +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-13701 ++ mktemp + local LAST_OUT=/tmp/tmp.4ITRGRirOX ++ mktemp + local LAST_ERR=/tmp/tmp.v0FoNErxZn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-13701 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-13701 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-13701 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.4ITRGRirOX + cat /tmp/tmp.v0FoNErxZn error: no matching resources found + rm /tmp/tmp.4ITRGRirOX /tmp/tmp.v0FoNErxZn + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for 
pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo some-name-pxc-0 + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.c3Nhpl2YNX +++ mktemp ++ local LAST_ERR=/tmp/tmp.4AwKfwPZfq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.c3Nhpl2YNX ++ cat /tmp/tmp.4AwKfwPZfq ++ rm /tmp/tmp.c3Nhpl2YNX /tmp/tmp.4AwKfwPZfq ++ return 0 + local 'root_pass=8mdU@pcM{.,-cZ8m' + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''8mdU@pcM{.,-cZ8m'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -p'\''8mdU@pcM{.,-cZ8m'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jH3EHePKpN +++ mktemp ++ local LAST_ERR=/tmp/tmp.AEcx4yphxB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jH3EHePKpN ++ cat /tmp/tmp.AEcx4yphxB ++ rm /tmp/tmp.jH3EHePKpN /tmp/tmp.AEcx4yphxB ++ return 0 + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + local pod=pxc-client-59944c5bbf-pl74r + local max_retry=480 + 
local ns= ++ echo pxc-client-59944c5bbf-pl74r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-pl74r condition met waiting for pod/pxc-client-59944c5bbf-pl74r to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''8mdU@pcM{.,-cZ8m'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -p'\''8mdU@pcM{.,-cZ8m'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tu78O8Utj3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.U1rWF1zgi2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tu78O8Utj3 ++ cat /tmp/tmp.U1rWF1zgi2 ++ rm /tmp/tmp.tu78O8Utj3 /tmp/tmp.U1rWF1zgi2 ++ return 0 + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + local pod=pxc-client-59944c5bbf-pl74r + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-pl74r ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-pl74r condition met waiting for pod/pxc-client-59944c5bbf-pl74r to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''8mdU@pcM{.,-cZ8m'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''8mdU@pcM{.,-cZ8m'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''8mdU@pcM{.,-cZ8m'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''8mdU@pcM{.,-cZ8m'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RQZWc5EyBo +++ mktemp ++ local LAST_ERR=/tmp/tmp.6AX0675VBm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RQZWc5EyBo ++ cat /tmp/tmp.6AX0675VBm ++ rm /tmp/tmp.RQZWc5EyBo /tmp/tmp.6AX0675VBm ++ return 0 + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + local pod=pxc-client-59944c5bbf-pl74r + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-pl74r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-pl74r condition met 
waiting for pod/pxc-client-59944c5bbf-pl74r to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.AIh4ASubZF/select-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-1.sql /tmp/tmp.AIh4ASubZF/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''8mdU@pcM{.,-cZ8m'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''8mdU@pcM{.,-cZ8m'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''8mdU@pcM{.,-cZ8m'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''8mdU@pcM{.,-cZ8m'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zPW1I7a6zB +++ mktemp ++ local LAST_ERR=/tmp/tmp.vsJU5SVbEZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zPW1I7a6zB ++ cat /tmp/tmp.vsJU5SVbEZ ++ rm /tmp/tmp.zPW1I7a6zB /tmp/tmp.vsJU5SVbEZ ++ return 0 + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + local pod=pxc-client-59944c5bbf-pl74r + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-pl74r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-pl74r condition met waiting for pod/pxc-client-59944c5bbf-pl74r to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.AIh4ASubZF/select-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-1.sql /tmp/tmp.AIh4ASubZF/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''8mdU@pcM{.,-cZ8m'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''8mdU@pcM{.,-cZ8m'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''8mdU@pcM{.,-cZ8m'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''8mdU@pcM{.,-cZ8m'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZplrMPuSIC +++ mktemp ++ local LAST_ERR=/tmp/tmp.ee5gCvaaQT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZplrMPuSIC ++ cat /tmp/tmp.ee5gCvaaQT ++ rm /tmp/tmp.ZplrMPuSIC /tmp/tmp.ee5gCvaaQT ++ return 0 + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + local pod=pxc-client-59944c5bbf-pl74r + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-pl74r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-pl74r condition met waiting for pod/pxc-client-59944c5bbf-pl74r to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.AIh4ASubZF/select-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-1.sql /tmp/tmp.AIh4ASubZF/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ grep -E -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uBdtR3KdpA +++ mktemp ++ local LAST_ERR=/tmp/tmp.WLgtBzDIGa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uBdtR3KdpA ++ cat /tmp/tmp.WLgtBzDIGa Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.uBdtR3KdpA /tmp/tmp.WLgtBzDIGa ++ return 0 + '[' '' ']' + desc 'test missing passwords were created and present in internal secrets' + set +o xtrace ----------------------------------------------------------------------------------- test missing passwords were created and present in internal secrets ----------------------------------------------------------------------------------- + empty_pwds=() + wrong_pwds=() + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking root' Checking root ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.9DD9FssXiw +++ mktemp ++ local LAST_ERR=/tmp/tmp.UEl7S04aaV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9DD9FssXiw ++ cat /tmp/tmp.UEl7S04aaV ++ rm /tmp/tmp.9DD9FssXiw /tmp/tmp.UEl7S04aaV ++ return 0 + secret_pass='8mdU@pcM{.,-cZ8m' ++ getSecretData internal-some-name root ++ local secretName=internal-some-name ++ local dataKey=root ++ kubectl_bin get secrets/internal-some-name '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.EfPJn1cCrf +++ mktemp ++ local LAST_ERR=/tmp/tmp.N8snDidi2U ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EfPJn1cCrf ++ cat /tmp/tmp.N8snDidi2U ++ rm /tmp/tmp.EfPJn1cCrf /tmp/tmp.N8snDidi2U ++ return 0 + int_secret_pass='8mdU@pcM{.,-cZ8m' + [[ -z 8mdU@pcM{.,-cZ8m ]] + [[ 8mdU@pcM{.,-cZ8m != \8\m\d\U\@\p\c\M\{\.\,\-\c\Z\8\m ]] + [[ root != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ root ]] + [[ '' =~ root ]] + echo 'Running compare for root' Running compare for root + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''8mdU@pcM{.,-cZ8m'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''8mdU@pcM{.,-cZ8m'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql ]] + 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''8mdU@pcM{.,-cZ8m'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''8mdU@pcM{.,-cZ8m'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CC2jzoANrX +++ mktemp ++ local LAST_ERR=/tmp/tmp.BMJGaui5mN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CC2jzoANrX ++ cat /tmp/tmp.BMJGaui5mN ++ rm /tmp/tmp.CC2jzoANrX /tmp/tmp.BMJGaui5mN ++ return 0 + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + local pod=pxc-client-59944c5bbf-pl74r + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-pl74r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-pl74r condition met waiting for pod/pxc-client-59944c5bbf-pl74r to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.AIh4ASubZF/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.AIh4ASubZF/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking xtrabackup' Checking xtrabackup ++ getSecretData my-cluster-secrets xtrabackup ++ local secretName=my-cluster-secrets ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.AGVNcZjJuN +++ mktemp ++ local LAST_ERR=/tmp/tmp.sAQ5jkgKdS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AGVNcZjJuN ++ cat /tmp/tmp.sAQ5jkgKdS ++ rm /tmp/tmp.AGVNcZjJuN /tmp/tmp.sAQ5jkgKdS ++ return 0 + secret_pass='?}MyJFh3_jYMM{ZX' ++ getSecretData internal-some-name xtrabackup ++ local secretName=internal-some-name ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.JpXf52t7KK +++ mktemp ++ local LAST_ERR=/tmp/tmp.WOKqQlFpIN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JpXf52t7KK ++ cat /tmp/tmp.WOKqQlFpIN ++ rm /tmp/tmp.JpXf52t7KK /tmp/tmp.WOKqQlFpIN ++ return 0 + int_secret_pass='?}MyJFh3_jYMM{ZX' + [[ -z ?}MyJFh3_jYMM{ZX ]] + [[ ?}MyJFh3_jYMM{ZX != \?\}\M\y\J\F\h\3\_\j\Y\M\M\{\Z\X ]] + [[ xtrabackup != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ xtrabackup ]] + [[ '' =~ xtrabackup ]] + echo 'Running compare for xtrabackup' Running compare for xtrabackup + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''?}MyJFh3_jYMM{ZX'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''?}MyJFh3_jYMM{ZX'\''' + local postfix= + local 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''?}MyJFh3_jYMM{ZX'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''?}MyJFh3_jYMM{ZX'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HcemWSrpjw +++ mktemp ++ local LAST_ERR=/tmp/tmp.yakGIMYvf8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HcemWSrpjw ++ cat /tmp/tmp.yakGIMYvf8 ++ rm /tmp/tmp.HcemWSrpjw /tmp/tmp.yakGIMYvf8 ++ return 0 + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + local pod=pxc-client-59944c5bbf-pl74r + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-pl74r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-pl74r condition met waiting for pod/pxc-client-59944c5bbf-pl74r to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.AIh4ASubZF/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.AIh4ASubZF/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking monitor' Checking monitor ++ getSecretData my-cluster-secrets monitor ++ local secretName=my-cluster-secrets ++ local dataKey=monitor ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.5r2T3JxNQI +++ mktemp ++ local LAST_ERR=/tmp/tmp.AUZJo3lz0m ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5r2T3JxNQI ++ cat /tmp/tmp.AUZJo3lz0m ++ rm /tmp/tmp.5r2T3JxNQI /tmp/tmp.AUZJo3lz0m ++ return 0 + secret_pass=monitor_password ++ getSecretData internal-some-name monitor ++ local secretName=internal-some-name ++ local dataKey=monitor ++ kubectl_bin get secrets/internal-some-name '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.8yudllYVEK +++ mktemp ++ local LAST_ERR=/tmp/tmp.y347fXIxNv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8yudllYVEK ++ cat /tmp/tmp.y347fXIxNv ++ rm /tmp/tmp.8yudllYVEK /tmp/tmp.y347fXIxNv ++ return 0 + int_secret_pass=monitor_password + [[ -z monitor_password ]] + [[ monitor_password != \m\o\n\i\t\o\r\_\p\a\s\s\w\o\r\d ]] + [[ monitor != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ monitor ]] + [[ '' =~ monitor ]] + echo 'Running compare for monitor' Running compare for monitor + compare_mysql_cmd select-4 'SHOW 
TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JGAc9e3UCt +++ mktemp ++ local LAST_ERR=/tmp/tmp.1xrup3YMMr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JGAc9e3UCt ++ cat /tmp/tmp.1xrup3YMMr ++ rm /tmp/tmp.JGAc9e3UCt /tmp/tmp.1xrup3YMMr ++ return 0 + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + local pod=pxc-client-59944c5bbf-pl74r + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-pl74r ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-pl74r condition met waiting for pod/pxc-client-59944c5bbf-pl74r to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.AIh4ASubZF/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.AIh4ASubZF/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking proxyadmin' Checking proxyadmin ++ getSecretData my-cluster-secrets proxyadmin ++ local secretName=my-cluster-secrets ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.yOvzvqTqD9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.VqbjjZx0Av ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yOvzvqTqD9 ++ cat /tmp/tmp.VqbjjZx0Av ++ rm /tmp/tmp.yOvzvqTqD9 /tmp/tmp.VqbjjZx0Av ++ return 0 + secret_pass='!pXcvo8FmeMl4*2-fD' ++ getSecretData internal-some-name proxyadmin ++ local secretName=internal-some-name ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.kOX2Y1cLZ9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ExpLl84QyS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kOX2Y1cLZ9 ++ cat /tmp/tmp.ExpLl84QyS ++ rm /tmp/tmp.kOX2Y1cLZ9 /tmp/tmp.ExpLl84QyS ++ return 0 + int_secret_pass='!pXcvo8FmeMl4*2-fD' + [[ -z !pXcvo8FmeMl4*2-fD ]] + [[ !pXcvo8FmeMl4*2-fD != \!\p\X\c\v\o\8\F\m\e\M\l\4\*\2\-\f\D ]] + [[ proxyadmin != \p\r\o\x\y\a\d\m\i\n ]] + [[ proxyadmin == \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ proxyadmin ]] + [[ '' =~ proxyadmin ]] + echo 'Running compare for proxyadmin' Running compare for proxyadmin + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''!pXcvo8FmeMl4*2-fD'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''!pXcvo8FmeMl4*2-fD'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''!pXcvo8FmeMl4*2-fD'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''!pXcvo8FmeMl4*2-fD'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.AIh4ASubZF/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-2.sql /tmp/tmp.AIh4ASubZF/select-2.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking operator' Checking operator ++ getSecretData my-cluster-secrets operator ++ local secretName=my-cluster-secrets ++ local dataKey=operator ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.oEYRpfMCW6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.6aqg6gLevG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oEYRpfMCW6 ++ cat /tmp/tmp.6aqg6gLevG ++ rm /tmp/tmp.oEYRpfMCW6 /tmp/tmp.6aqg6gLevG ++ return 0 + secret_pass='W#vqH>&]IC}%W]2[1S9' ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ base64 --decode ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.v6lxIIuTnw +++ mktemp ++ local LAST_ERR=/tmp/tmp.HRiwsQd2cN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.v6lxIIuTnw ++ cat /tmp/tmp.HRiwsQd2cN ++ rm /tmp/tmp.v6lxIIuTnw /tmp/tmp.HRiwsQd2cN ++ return 0 + int_secret_pass='W#vqH>&]IC}%W]2[1S9' + [[ -z W#vqH>&]IC}%W]2[1S9 ]] + [[ W#vqH>&]IC}%W]2[1S9 != \W\#\v\q\H\>\&\]\I\C\}\%\W\]\2\[\1\S\9 ]] + [[ operator != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ operator ]] + [[ '' =~ operator ]] + echo 'Running compare for operator' Running compare for operator + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''W#vqH>&]IC}%W]2[1S9'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''W#vqH>&]IC}%W]2[1S9'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''W#vqH>&]IC}%W]2[1S9'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''W#vqH>&]IC}%W]2[1S9'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AZiwVkIeFx +++ mktemp ++ local LAST_ERR=/tmp/tmp.qjhCvM4YfU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AZiwVkIeFx ++ cat /tmp/tmp.qjhCvM4YfU ++ rm /tmp/tmp.AZiwVkIeFx /tmp/tmp.qjhCvM4YfU ++ return 0 + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + local pod=pxc-client-59944c5bbf-pl74r + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-pl74r ++ /usr/bin/sed -E 
's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-pl74r condition met waiting for pod/pxc-client-59944c5bbf-pl74r to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.AIh4ASubZF/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.AIh4ASubZF/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking replication' Checking replication ++ getSecretData my-cluster-secrets replication ++ local secretName=my-cluster-secrets ++ local dataKey=replication ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.wif64w3nGi +++ mktemp ++ local LAST_ERR=/tmp/tmp.O1m01Bljfu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wif64w3nGi ++ cat /tmp/tmp.O1m01Bljfu ++ rm /tmp/tmp.wif64w3nGi /tmp/tmp.O1m01Bljfu ++ return 0 + secret_pass='6WUuyfvrA(A3?%6Xe+B' ++ getSecretData internal-some-name replication ++ local secretName=internal-some-name ++ local dataKey=replication ++ kubectl_bin get secrets/internal-some-name '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.ypNH24VNFN +++ mktemp ++ local LAST_ERR=/tmp/tmp.wEreAWwrom ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ypNH24VNFN ++ cat /tmp/tmp.wEreAWwrom ++ rm /tmp/tmp.ypNH24VNFN /tmp/tmp.wEreAWwrom ++ return 0 + int_secret_pass='6WUuyfvrA(A3?%6Xe+B' + [[ -z 6WUuyfvrA(A3?%6Xe+B ]] + [[ 6WUuyfvrA(A3?%6Xe+B != \6\W\U\u\y\f\v\r\A\(\A\3\?\%\6\X\e\+\B ]] + [[ replication != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ replication ]] + [[ '' =~ replication ]] + echo 'Running compare for replication' Running compare for replication + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''6WUuyfvrA(A3?%6Xe+B'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''6WUuyfvrA(A3?%6Xe+B'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''6WUuyfvrA(A3?%6Xe+B'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''6WUuyfvrA(A3?%6Xe+B'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZKnO8ePqyG +++ mktemp ++ local LAST_ERR=/tmp/tmp.1doBeJG89f ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ 
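# get_client_pod and wait_pod above locate the single test client by its label and block until it
# is Ready. Roughly equivalent plain kubectl (the timeout mirrors max_retry=480 in the trace):
client_pod=$(kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}')
kubectl wait --for=condition=Ready "pod/${client_pod}" --timeout=480s
# The "Defaulted container ... out of: pxc-client, backup" lines are kubectl picking the first
# container because the exec did not name one.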
break ++ cat /tmp/tmp.ZKnO8ePqyG ++ cat /tmp/tmp.1doBeJG89f ++ rm /tmp/tmp.ZKnO8ePqyG /tmp/tmp.1doBeJG89f ++ return 0 + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + local pod=pxc-client-59944c5bbf-pl74r + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-pl74r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-pl74r condition met waiting for pod/pxc-client-59944c5bbf-pl74r to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.AIh4ASubZF/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.AIh4ASubZF/select-4.sql + [[ -n '' ]] + [[ -n '' ]] + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Uu0XlZlClp ++ mktemp + local LAST_ERR=/tmp/tmp.W7DaP9DVtX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Uu0XlZlClp secret/my-cluster-secrets patched + cat /tmp/tmp.W7DaP9DVtX + rm /tmp/tmp.Uu0XlZlClp /tmp/tmp.W7DaP9DVtX + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wL9FW7jR8z +++ mktemp ++ local LAST_ERR=/tmp/tmp.GWuoSMSABC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wL9FW7jR8z ++ cat /tmp/tmp.GWuoSMSABC ++ rm /tmp/tmp.wL9FW7jR8z /tmp/tmp.GWuoSMSABC ++ return 0 + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + local pod=pxc-client-59944c5bbf-pl74r + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-pl74r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-pl74r condition met waiting for pod/pxc-client-59944c5bbf-pl74r to 
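# patch_secret above rotates a password by writing a new base64 value into the secret and letting
# the operator reconcile it into the cluster. The same patch by hand (dGVzdC1wYXNzd29yZA== is
# base64 for test-password):
new_pass_b64=$(printf '%s' 'test-password' | base64)
kubectl patch secret my-cluster-secrets -p="{\"data\":{\"root\": \"${new_pass_b64}\"}}"
# The sleep 15 that follows simply gives the operator time to react before re-querying via proxy.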
become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.AIh4ASubZF/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.AIh4ASubZF/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.lucFeq7gsg ++ mktemp + local LAST_ERR=/tmp/tmp.EG5TBPXfi7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lucFeq7gsg perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.EG5TBPXfi7 + rm /tmp/tmp.lucFeq7gsg /tmp/tmp.EG5TBPXfi7 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IAjzisebCl +++ mktemp ++ local LAST_ERR=/tmp/tmp.uNGGKseIZL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IAjzisebCl ++ cat /tmp/tmp.uNGGKseIZL ++ rm /tmp/tmp.IAjzisebCl /tmp/tmp.uNGGKseIZL ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Zfv54HmwTr +++ mktemp ++ local LAST_ERR=/tmp/tmp.lzG78k2FpK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Zfv54HmwTr ++ cat /tmp/tmp.lzG78k2FpK ++ rm /tmp/tmp.Zfv54HmwTr /tmp/tmp.lzG78k2FpK ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.za8jD8ZbhI ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.smz2YWXhms +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.za8jD8ZbhI +++++ cat /tmp/tmp.smz2YWXhms +++++ rm /tmp/tmp.za8jD8ZbhI /tmp/tmp.smz2YWXhms +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.akUJ1OfKCq ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.eYJ5VdEp5M +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' 
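# The proxysql resize above is a plain merge patch against the PerconaXtraDBCluster custom
# resource; wait_cluster_consistency then polls status until the new size is live:
kubectl patch pxc some-name --type=merge -p='{"spec":{"proxysql":{"size":3}}}'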
+++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.akUJ1OfKCq +++++ cat /tmp/tmp.eYJ5VdEp5M +++++ rm /tmp/tmp.akUJ1OfKCq /tmp/tmp.eYJ5VdEp5M +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uB3erHeTuF +++ mktemp ++ local LAST_ERR=/tmp/tmp.cRiEgHhjFm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uB3erHeTuF ++ cat /tmp/tmp.cRiEgHhjFm ++ rm /tmp/tmp.uB3erHeTuF /tmp/tmp.cRiEgHhjFm ++ return 0 + [[ 3 == \3 ]] + echo + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.faT4vFucag ++ mktemp + local LAST_ERR=/tmp/tmp.XHqW7WDZhN + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.faT4vFucag secret/my-cluster-secrets patched + cat /tmp/tmp.XHqW7WDZhN + rm /tmp/tmp.faT4vFucag /tmp/tmp.XHqW7WDZhN + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UMbfL31KDD +++ mktemp ++ local LAST_ERR=/tmp/tmp.1SyZZR7lTU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UMbfL31KDD ++ cat /tmp/tmp.1SyZZR7lTU ++ rm /tmp/tmp.UMbfL31KDD /tmp/tmp.1SyZZR7lTU ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NSfVpgC1rq +++ mktemp ++ local LAST_ERR=/tmp/tmp.TOsuwjfted ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NSfVpgC1rq ++ cat /tmp/tmp.TOsuwjfted ++ rm /tmp/tmp.NSfVpgC1rq /tmp/tmp.TOsuwjfted ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
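# wait_cluster_consistency boils down to a bounded poll of .status.state (plus the pxc/proxysql
# ready counters checked once the state is ready). A condensed sketch of the loop seen in this
# trace (bound of 300 passes at 5s each, as in the log):
i=0
until [ "$(kubectl get pxc some-name -o 'jsonpath={.status.state}')" = "ready" ]; do
  [ "$i" -ge 300 ] && { echo "pxc/some-name never became ready" >&2; exit 1; }
  echo -n .; sleep 5; i=$((i + 1))
done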
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iNaEC80giM +++ mktemp ++ local LAST_ERR=/tmp/tmp.rL66T51AUf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iNaEC80giM ++ cat /tmp/tmp.rL66T51AUf ++ rm /tmp/tmp.iNaEC80giM /tmp/tmp.rL66T51AUf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GQoAl1GnNJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.q6RVL7HVXB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GQoAl1GnNJ ++ cat /tmp/tmp.q6RVL7HVXB ++ rm /tmp/tmp.GQoAl1GnNJ /tmp/tmp.q6RVL7HVXB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GWhpKnZmoG +++ mktemp ++ local LAST_ERR=/tmp/tmp.cmE0PiQQk9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GWhpKnZmoG ++ cat /tmp/tmp.cmE0PiQQk9 ++ rm /tmp/tmp.GWhpKnZmoG /tmp/tmp.cmE0PiQQk9 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GnS2jZYIDc +++ mktemp ++ local LAST_ERR=/tmp/tmp.flR0SktzNu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GnS2jZYIDc ++ cat /tmp/tmp.flR0SktzNu ++ rm /tmp/tmp.GnS2jZYIDc /tmp/tmp.flR0SktzNu ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.FmdCkoD0FS ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Jnm9lb3CHZ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.FmdCkoD0FS +++++ cat /tmp/tmp.Jnm9lb3CHZ +++++ rm /tmp/tmp.FmdCkoD0FS /tmp/tmp.Jnm9lb3CHZ +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.SUlIikSvMA ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.CaPTvyDy30 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.SUlIikSvMA +++++ cat /tmp/tmp.CaPTvyDy30 +++++ rm /tmp/tmp.SUlIikSvMA /tmp/tmp.CaPTvyDy30 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local 
LAST_OUT=/tmp/tmp.y57opHitBq +++ mktemp ++ local LAST_ERR=/tmp/tmp.n0RF5twCiJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.y57opHitBq ++ cat /tmp/tmp.n0RF5twCiJ ++ rm /tmp/tmp.y57opHitBq /tmp/tmp.n0RF5twCiJ ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.AIh4ASubZF/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-2.sql /tmp/tmp.AIh4ASubZF/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' 
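# compare_mysql_cmd_local above runs the statement inside each proxysql container against the
# ProxySQL admin interface on 127.0.0.1:6032, not through the service, so every replica is checked
# individually. Assuming the container ships a mysql client, as run_mysql_local implies:
kubectl exec some-name-proxysql-1 -c proxysql -- \
  mysql -h127.0.0.1 -P6032 -uproxyadmin -p'test-password' -NBe 'SHOW TABLES;'
# Repeating this for some-name-proxysql-0/1/2 proves the rotated proxyadmin password reached all
# three pods.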
-s /tmp/tmp.AIh4ASubZF/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-2.sql /tmp/tmp.AIh4ASubZF/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.AIh4ASubZF/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-2.sql /tmp/tmp.AIh4ASubZF/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.sdzD7neXdy ++ mktemp + local LAST_ERR=/tmp/tmp.iS8byKijs4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.sdzD7neXdy perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.iS8byKijs4 + rm /tmp/tmp.sdzD7neXdy /tmp/tmp.iS8byKijs4 + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.2LoOHLwA5v ++ mktemp + local LAST_ERR=/tmp/tmp.j6UtZrBKKm + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2LoOHLwA5v secret/my-cluster-secrets patched + cat /tmp/tmp.j6UtZrBKKm + rm /tmp/tmp.2LoOHLwA5v /tmp/tmp.j6UtZrBKKm + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5sOanmVVuD +++ mktemp ++ local LAST_ERR=/tmp/tmp.Bll7iihJcJ ++ local 
exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5sOanmVVuD ++ cat /tmp/tmp.Bll7iihJcJ ++ rm /tmp/tmp.5sOanmVVuD /tmp/tmp.Bll7iihJcJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.s6VbOlAHsH +++ mktemp ++ local LAST_ERR=/tmp/tmp.mivrILRP2M ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.s6VbOlAHsH ++ cat /tmp/tmp.mivrILRP2M ++ rm /tmp/tmp.s6VbOlAHsH /tmp/tmp.mivrILRP2M ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DH58LWA0Ux +++ mktemp ++ local LAST_ERR=/tmp/tmp.TMcvSBGTE3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DH58LWA0Ux ++ cat /tmp/tmp.TMcvSBGTE3 ++ rm /tmp/tmp.DH58LWA0Ux /tmp/tmp.TMcvSBGTE3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XdONNXL6Xo +++ mktemp ++ local LAST_ERR=/tmp/tmp.8TlgoVQoQi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XdONNXL6Xo ++ cat /tmp/tmp.8TlgoVQoQi ++ rm /tmp/tmp.XdONNXL6Xo /tmp/tmp.8TlgoVQoQi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.S5rpo5uDHx +++ mktemp ++ local LAST_ERR=/tmp/tmp.q9h1E8tIEK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.S5rpo5uDHx ++ cat /tmp/tmp.q9h1E8tIEK ++ rm /tmp/tmp.S5rpo5uDHx /tmp/tmp.q9h1E8tIEK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AGhPrxl3Nz +++ mktemp ++ local LAST_ERR=/tmp/tmp.YOehhzQMGy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AGhPrxl3Nz ++ cat /tmp/tmp.YOehhzQMGy ++ rm /tmp/tmp.AGhPrxl3Nz /tmp/tmp.YOehhzQMGy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
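# The mktemp/LAST_OUT/LAST_ERR/seq 0 2 pattern that recurs throughout this trace is the
# kubectl_bin wrapper: run kubectl up to three times, capture stdout/stderr to temp files, and
# replay them on exit. A stripped-down sketch (function name and retry pacing are illustrative):
kubectl_retry() {
  local out err rc=1
  out=$(mktemp); err=$(mktemp)
  for _ in 0 1 2; do
    if kubectl "$@" >"$out" 2>"$err"; then rc=0; break; fi
    sleep 1
  done
  cat "$out"; cat "$err" >&2
  rm -f "$out" "$err"
  return "$rc"
}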
.+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 [... checks 5 through 22 collapsed: each pass re-runs kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' into fresh mktemp LAST_OUT/LAST_ERR files, reads "initializing", prints ".", sleeps 5s, and increments i ...] + [[ initializing == \r\e\a\d\y ]] + echo -n .
.+ sleep 5 + [[ 23 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.O9aR8KahdP +++ mktemp ++ local LAST_ERR=/tmp/tmp.LYWcHvmUlX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.O9aR8KahdP ++ cat /tmp/tmp.LYWcHvmUlX ++ rm /tmp/tmp.O9aR8KahdP /tmp/tmp.LYWcHvmUlX ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fPt6zQhWkV +++ mktemp ++ local LAST_ERR=/tmp/tmp.zLeMyaR1oz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fPt6zQhWkV ++ cat /tmp/tmp.zLeMyaR1oz ++ rm /tmp/tmp.fPt6zQhWkV /tmp/tmp.zLeMyaR1oz ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.o7Qrb5LRSE ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.RhXDDLmgTD +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.o7Qrb5LRSE +++++ cat /tmp/tmp.RhXDDLmgTD +++++ rm /tmp/tmp.o7Qrb5LRSE /tmp/tmp.RhXDDLmgTD +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.A7YOB3Lms8 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.vY4sncn9FG +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.A7YOB3Lms8 +++++ cat /tmp/tmp.vY4sncn9FG +++++ rm /tmp/tmp.A7YOB3Lms8 /tmp/tmp.vY4sncn9FG +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pyTD88AzY1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.qQh8C3rSMQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pyTD88AzY1 ++ cat /tmp/tmp.qQh8C3rSMQ ++ rm /tmp/tmp.pyTD88AzY1 /tmp/tmp.qQh8C3rSMQ ++ return 0 + [[ 2 == \2 ]] + echo + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-3-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW 
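# compare_mysql_cmd and its _local variant finish by diffing the captured output against a golden
# file, preferring a version-specific variant when one exists (select-4 has a -80 file for the
# 8.0 image above; select-2/select-3 fall back to the generic file). The selection, condensed
# (IMAGE_PXC is an illustrative variable name):
expected=e2e-tests/users/compare/select-4.sql
[[ "$IMAGE_PXC" =~ 8\.0 ]] && [ -f "${expected%.sql}-80.sql" ] && expected="${expected%.sql}-80.sql"
diff -u "$expected" /tmp/tmp.AIh4ASubZF/select-4.sql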
DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' -s /tmp/tmp.AIh4ASubZF/select-3.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-3.sql /tmp/tmp.AIh4ASubZF/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.A8GrZbYokH ++ mktemp + local LAST_ERR=/tmp/tmp.RsMdL4u9VA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.A8GrZbYokH secret/my-cluster-secrets patched + cat /tmp/tmp.RsMdL4u9VA + rm /tmp/tmp.A8GrZbYokH /tmp/tmp.RsMdL4u9VA + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.wwsBFbUpgJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.jhnE2dm5Nj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wwsBFbUpgJ ++ cat /tmp/tmp.jhnE2dm5Nj ++ rm /tmp/tmp.wwsBFbUpgJ /tmp/tmp.jhnE2dm5Nj ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + retry=0 + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep additional_password + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tBxF886HZ2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Gu2iMsp2v4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tBxF886HZ2 ++ cat /tmp/tmp.Gu2iMsp2v4 ++ rm /tmp/tmp.tBxF886HZ2 /tmp/tmp.Gu2iMsp2v4 ++ return 0 + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + local pod=pxc-client-59944c5bbf-pl74r + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-pl74r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-pl74r 
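# The monitor check appears to rely on MySQL 8.0 dual passwords: right after the operator rotates
# the password (ALTER USER ... RETAIN CURRENT PASSWORD), mysql.user carries an additional_password
# attribute, which is what is_password_updated greps for. The underlying query, run from the
# client pod found earlier:
kubectl exec "$client_pod" -c pxc-client -- \
  mysql -h some-name-pxc -uroot -p'test-password' -NBe \
  "SELECT User_attributes FROM mysql.user WHERE user='monitor'"
# While both passwords are live the column shows {"additional_password": "..."}; once the old one
# is discarded it goes back to NULL.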
condition met waiting for pod/pxc-client-59944c5bbf-pl74r to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace {"additional_password": "$A$005$-8\\u001f\\r\\u001cq!Gn\\u00048hf4+\\u000e/?t27CT4nSl8.dmG8Es3q1V9RKuAz7lhlI/uXVIjHcc6Y7C"} + retry=0 + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MVB9qWiLCj +++ mktemp ++ local LAST_ERR=/tmp/tmp.AWCU6szOj1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MVB9qWiLCj ++ cat /tmp/tmp.AWCU6szOj1 ++ rm /tmp/tmp.MVB9qWiLCj /tmp/tmp.AWCU6szOj1 ++ return 0 + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + local pod=pxc-client-59944c5bbf-pl74r + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-pl74r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-pl74r condition met waiting for pod/pxc-client-59944c5bbf-pl74r to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 1 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.q8ZZRwEcLQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.LV7Wcka4aT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.q8ZZRwEcLQ ++ cat /tmp/tmp.LV7Wcka4aT ++ rm /tmp/tmp.q8ZZRwEcLQ /tmp/tmp.LV7Wcka4aT ++ return 0 + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + local pod=pxc-client-59944c5bbf-pl74r + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-pl74r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-pl74r condition met waiting for pod/pxc-client-59944c5bbf-pl74r to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 2 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot 
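# is_old_password_discarded is then polled until that attribute returns to NULL, i.e. until the
# operator has dropped the old monitor password everywhere. The wait loop, condensed (bound of
# 240 one-second passes, as in the trace):
retry=0
until kubectl exec "$client_pod" -c pxc-client -- \
    mysql -h some-name-pxc -uroot -p'test-password' -NBe \
    "SELECT User_attributes FROM mysql.user WHERE user='monitor'" | grep -q NULL; do
  [ "$retry" -ge 240 ] && { echo 'old password was never discarded' >&2; exit 1; }
  echo 'waiting for password propagation'; sleep 1; retry=$((retry + 1))
done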
-p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8szhbm1Rsd +++ mktemp ++ local LAST_ERR=/tmp/tmp.AYbCsVzam0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8szhbm1Rsd ++ cat /tmp/tmp.AYbCsVzam0 ++ rm /tmp/tmp.8szhbm1Rsd /tmp/tmp.AYbCsVzam0 ++ return 0 + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + local pod=pxc-client-59944c5bbf-pl74r + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-pl74r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-pl74r condition met waiting for pod/pxc-client-59944c5bbf-pl74r to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 3 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZMXZ5d3n39 +++ mktemp ++ local LAST_ERR=/tmp/tmp.COBKCYKkZK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZMXZ5d3n39 ++ cat /tmp/tmp.COBKCYKkZK ++ rm /tmp/tmp.ZMXZ5d3n39 /tmp/tmp.COBKCYKkZK ++ return 0 + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + local pod=pxc-client-59944c5bbf-pl74r + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-pl74r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-pl74r condition met waiting for pod/pxc-client-59944c5bbf-pl74r to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 4 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h 
some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ return 0 + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + set +o xtrace pod/pxc-client-59944c5bbf-pl74r condition met waiting for pod/pxc-client-59944c5bbf-pl74r to become Ready Defaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 [... checks [[ 5 -ge 240 ]] through [[ 9 -ge 240 ]] collapsed: each pass repeats the same is_old_password_discarded query (grep NULL over the monitor user's User_attributes) and finds the old password still present ...] + [[ 10 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + set +o xtrace pod/pxc-client-59944c5bbf-pl74r condition met waiting for pod/pxc-client-59944c5bbf-pl74r to become Ready Defaulted container "pxc-client" out of: pxc-client, backup
.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 11 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X9M84b5zDT +++ mktemp ++ local LAST_ERR=/tmp/tmp.NcVG7D1SKI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.X9M84b5zDT ++ cat /tmp/tmp.NcVG7D1SKI ++ rm /tmp/tmp.X9M84b5zDT /tmp/tmp.NcVG7D1SKI ++ return 0 + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + local pod=pxc-client-59944c5bbf-pl74r + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-pl74r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-pl74r condition met waiting for pod/pxc-client-59944c5bbf-pl74r to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 12 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zv8UpRpSgV +++ mktemp ++ local LAST_ERR=/tmp/tmp.qNOV8YxuS5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zv8UpRpSgV ++ cat /tmp/tmp.qNOV8YxuS5 ++ rm /tmp/tmp.zv8UpRpSgV /tmp/tmp.qNOV8YxuS5 ++ return 0 + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + local pod=pxc-client-59944c5bbf-pl74r + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-pl74r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-pl74r condition met waiting for pod/pxc-client-59944c5bbf-pl74r to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace NULL + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency 
----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LKZ8aABu0t +++ mktemp ++ local LAST_ERR=/tmp/tmp.ikmtD3edzr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LKZ8aABu0t ++ cat /tmp/tmp.ikmtD3edzr ++ rm /tmp/tmp.LKZ8aABu0t /tmp/tmp.ikmtD3edzr ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.T15sWk3l7O +++ mktemp ++ local LAST_ERR=/tmp/tmp.dyW50pZ7LE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.T15sWk3l7O ++ cat /tmp/tmp.dyW50pZ7LE ++ rm /tmp/tmp.T15sWk3l7O /tmp/tmp.dyW50pZ7LE ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.uTo6rIgWgH ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.hQlWG295od +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.uTo6rIgWgH +++++ cat /tmp/tmp.hQlWG295od +++++ rm /tmp/tmp.uTo6rIgWgH /tmp/tmp.hQlWG295od +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ypv167F8E0 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.86ZHDY3KAW +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ypv167F8E0 +++++ cat /tmp/tmp.86ZHDY3KAW +++++ rm /tmp/tmp.ypv167F8E0 /tmp/tmp.86ZHDY3KAW +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.x9rztN5ASV +++ mktemp ++ local LAST_ERR=/tmp/tmp.0IqaARgdt0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.x9rztN5ASV ++ cat /tmp/tmp.0IqaARgdt0 ++ rm /tmp/tmp.x9rztN5ASV /tmp/tmp.0IqaARgdt0 ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f 
/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JANu0yllFm +++ mktemp ++ local LAST_ERR=/tmp/tmp.9Yx7bLELve ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JANu0yllFm ++ cat /tmp/tmp.9Yx7bLELve ++ rm /tmp/tmp.JANu0yllFm /tmp/tmp.9Yx7bLELve ++ return 0 + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + local pod=pxc-client-59944c5bbf-pl74r + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-pl74r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-pl74r condition met waiting for pod/pxc-client-59944c5bbf-pl74r to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.AIh4ASubZF/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.AIh4ASubZF/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.dDGmc9Puam ++ mktemp + local LAST_ERR=/tmp/tmp.jQXCaQz2Wp + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dDGmc9Puam secret/my-cluster-secrets patched + cat /tmp/tmp.jQXCaQz2Wp + rm /tmp/tmp.dDGmc9Puam /tmp/tmp.jQXCaQz2Wp + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nlPrAYtoFt +++ mktemp ++ local LAST_ERR=/tmp/tmp.gv7MLZeCFq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nlPrAYtoFt ++ cat /tmp/tmp.gv7MLZeCFq ++ rm /tmp/tmp.nlPrAYtoFt /tmp/tmp.gv7MLZeCFq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
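
The retry loop traced above (attempts 7 through 12) reads as the suite waiting for MySQL 8.0's dual-password window to close: after a password change that retains the current password, the old one lives on in mysql.user's User_attributes until it is discarded, at which point the column reads NULL, which is exactly what the grep is fishing for. A minimal sketch of is_old_password_discarded and its calling loop, reconstructed from the trace; run_mysql is the suite's own query helper seen throughout this log, and the exact failure handling on timeout is an assumption:

# Succeeds once the old password has been discarded, i.e. the user's
# User_attributes no longer carries an additional (old) password.
is_old_password_discarded() {
    local username=$1
    local uri=$2
    # While the dual-password window is open, User_attributes holds the
    # old password as JSON; it reads NULL once the old one is discarded.
    run_mysql "SELECT User_attributes FROM mysql.user WHERE user='${username}'" "${uri}" \
        | grep NULL
}

# Caller side, as seen in the trace: poll once per second, up to 240 times.
retry=0
until is_old_password_discarded monitor "-h some-name-pxc -uroot -p'test-password'"; do
    echo 'waiting for password propagation'
    sleep 1
    let retry+=1
    [[ $retry -ge 240 ]] && { echo 'old password was not discarded in time' >&2; exit 1; }
done
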
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aDtk6xZ0xO +++ mktemp ++ local LAST_ERR=/tmp/tmp.Jew38hjeLB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aDtk6xZ0xO ++ cat /tmp/tmp.Jew38hjeLB ++ rm /tmp/tmp.aDtk6xZ0xO /tmp/tmp.Jew38hjeLB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hSdAQwd4c4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.VcVoVs6Znf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hSdAQwd4c4 ++ cat /tmp/tmp.VcVoVs6Znf ++ rm /tmp/tmp.hSdAQwd4c4 /tmp/tmp.VcVoVs6Znf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fx6wBbgBDm +++ mktemp ++ local LAST_ERR=/tmp/tmp.PokfNFhi8c ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fx6wBbgBDm ++ cat /tmp/tmp.PokfNFhi8c ++ rm /tmp/tmp.fx6wBbgBDm /tmp/tmp.PokfNFhi8c ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Gl4Iqy6RAn +++ mktemp ++ local LAST_ERR=/tmp/tmp.W2NoDNeaDA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Gl4Iqy6RAn ++ cat /tmp/tmp.W2NoDNeaDA ++ rm /tmp/tmp.Gl4Iqy6RAn /tmp/tmp.W2NoDNeaDA ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.vVzefKqTrH ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.WvvRoVC7bs +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.vVzefKqTrH +++++ cat /tmp/tmp.WvvRoVC7bs +++++ rm /tmp/tmp.vVzefKqTrH /tmp/tmp.WvvRoVC7bs +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.SkpeKJFNDZ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.SX4co5JqfH +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.SkpeKJFNDZ +++++ cat /tmp/tmp.SX4co5JqfH +++++ rm /tmp/tmp.SkpeKJFNDZ /tmp/tmp.SX4co5JqfH +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local 
LAST_OUT=/tmp/tmp.SwlCCalHWl +++ mktemp ++ local LAST_ERR=/tmp/tmp.Lvm90R2Q8b ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SwlCCalHWl ++ cat /tmp/tmp.Lvm90R2Q8b ++ rm /tmp/tmp.SwlCCalHWl /tmp/tmp.Lvm90R2Q8b ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3e6L2jOJ8b +++ mktemp ++ local LAST_ERR=/tmp/tmp.xDm3etwCZK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3e6L2jOJ8b ++ cat /tmp/tmp.xDm3etwCZK ++ rm /tmp/tmp.3e6L2jOJ8b /tmp/tmp.xDm3etwCZK ++ return 0 + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + local pod=pxc-client-59944c5bbf-pl74r + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-pl74r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-pl74r condition met waiting for pod/pxc-client-59944c5bbf-pl74r to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.AIh4ASubZF/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.AIh4ASubZF/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.hLRRf2c7M4 ++ mktemp + local LAST_ERR=/tmp/tmp.iKtZI5K9Po + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hLRRf2c7M4 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.iKtZI5K9Po + rm /tmp/tmp.hLRRf2c7M4 /tmp/tmp.iKtZI5K9Po + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xouIsvSYud +++ mktemp ++ local LAST_ERR=/tmp/tmp.VJ1oGv4DP5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xouIsvSYud ++ cat /tmp/tmp.VJ1oGv4DP5 ++ rm /tmp/tmp.xouIsvSYud /tmp/tmp.VJ1oGv4DP5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.d0Nq4zDm79 +++ mktemp ++ local LAST_ERR=/tmp/tmp.AbZuCLK3Cs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.d0Nq4zDm79 ++ cat /tmp/tmp.AbZuCLK3Cs ++ rm /tmp/tmp.d0Nq4zDm79 /tmp/tmp.AbZuCLK3Cs ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.djK82koXdN +++ mktemp ++ local LAST_ERR=/tmp/tmp.j9OwLbwW8K ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.djK82koXdN ++ cat /tmp/tmp.j9OwLbwW8K ++ rm /tmp/tmp.djK82koXdN /tmp/tmp.j9OwLbwW8K ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
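
Virtually every kubectl call in this log runs through the suite's kubectl_bin wrapper, which accounts for the recurring mktemp/LAST_OUT/LAST_ERR/seq 0 2 boilerplate. A minimal sketch reconstructed from the traced statements; only the success path is ever exercised above, so the back-off between failed attempts is an assumption:

# Retry wrapper around kubectl: capture output, try up to three times,
# replay the captured output, clean up. Reconstructed from the trace.
kubectl_bin() {
    local LAST_OUT LAST_ERR
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    local exit_status=0

    for i in $(seq 0 2); do
        set +e                              # tolerate a failing attempt
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ $exit_status != 0 ]; then
            sleep 1                         # back-off between attempts (assumed)
            continue
        fi
        break                               # success: stop retrying
    done

    cat "$LAST_OUT"
    cat "$LAST_ERR" >&2
    rm "$LAST_OUT" "$LAST_ERR"
    return $exit_status
}
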
.+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DamWgYMSoY +++ mktemp ++ local LAST_ERR=/tmp/tmp.dQVj82geH8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DamWgYMSoY ++ cat /tmp/tmp.dQVj82geH8 ++ rm /tmp/tmp.DamWgYMSoY /tmp/tmp.dQVj82geH8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8vLLSNj3Ka +++ mktemp ++ local LAST_ERR=/tmp/tmp.MxDNu42SbL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8vLLSNj3Ka ++ cat /tmp/tmp.MxDNu42SbL ++ rm /tmp/tmp.8vLLSNj3Ka /tmp/tmp.MxDNu42SbL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.m28YK50F3q +++ mktemp ++ local LAST_ERR=/tmp/tmp.yvHMItmcbN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.m28YK50F3q ++ cat /tmp/tmp.yvHMItmcbN ++ rm /tmp/tmp.m28YK50F3q /tmp/tmp.yvHMItmcbN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aM6sFhszpU +++ mktemp ++ local LAST_ERR=/tmp/tmp.9DgJILt5pD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aM6sFhszpU ++ cat /tmp/tmp.9DgJILt5pD ++ rm /tmp/tmp.aM6sFhszpU /tmp/tmp.9DgJILt5pD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.A4rCbsMkkv +++ mktemp ++ local LAST_ERR=/tmp/tmp.fMrtrFcvZb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.A4rCbsMkkv ++ cat /tmp/tmp.fMrtrFcvZb ++ rm /tmp/tmp.A4rCbsMkkv /tmp/tmp.fMrtrFcvZb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hwYJDZSZUc +++ mktemp ++ local LAST_ERR=/tmp/tmp.Hnv67CgtFE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hwYJDZSZUc ++ cat /tmp/tmp.Hnv67CgtFE ++ rm /tmp/tmp.hwYJDZSZUc /tmp/tmp.Hnv67CgtFE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7ONpLdTfwE +++ mktemp ++ local LAST_ERR=/tmp/tmp.aWoJAWqlxo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7ONpLdTfwE ++ cat /tmp/tmp.aWoJAWqlxo ++ rm /tmp/tmp.7ONpLdTfwE /tmp/tmp.aWoJAWqlxo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2GrKTqA2vF +++ mktemp ++ local LAST_ERR=/tmp/tmp.apsYSsyt1p ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2GrKTqA2vF ++ cat /tmp/tmp.apsYSsyt1p ++ rm /tmp/tmp.2GrKTqA2vF /tmp/tmp.apsYSsyt1p ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.p2uvn4LERJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.1td9etjJK7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.p2uvn4LERJ ++ cat /tmp/tmp.1td9etjJK7 ++ rm /tmp/tmp.p2uvn4LERJ /tmp/tmp.1td9etjJK7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ymtjF5HdeC +++ mktemp ++ local LAST_ERR=/tmp/tmp.VhbzgPLuij ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ymtjF5HdeC ++ cat /tmp/tmp.VhbzgPLuij ++ rm /tmp/tmp.ymtjF5HdeC /tmp/tmp.VhbzgPLuij ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.b6zjycWahm +++ mktemp ++ local LAST_ERR=/tmp/tmp.GXvFqpQjil ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.b6zjycWahm ++ cat /tmp/tmp.GXvFqpQjil ++ rm /tmp/tmp.b6zjycWahm /tmp/tmp.GXvFqpQjil ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OGihRhcOfz +++ mktemp ++ local LAST_ERR=/tmp/tmp.ILbd6vRsJ6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OGihRhcOfz ++ cat /tmp/tmp.ILbd6vRsJ6 ++ rm /tmp/tmp.OGihRhcOfz /tmp/tmp.ILbd6vRsJ6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
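
The polling that surrounds this point is wait_cluster_consistency: check .status.state every five seconds until the custom resource reports ready, then assert the PXC and proxy ready counts. A condensed sketch under the same names as the trace; the action taken on timeout is an assumption:

# Poll the PerconaXtraDBCluster status until the whole cluster is ready.
wait_cluster_consistency() {
    local cluster_name=$1
    local cluster_size=$2
    local proxy_size=$3
    local i=0
    local max=300

    sleep 7
    echo -n "waiting for pxc/${cluster_name} to be ready"
    until [[ $(kubectl_bin get pxc "$cluster_name" -o 'jsonpath={.status.state}') == "ready" ]]; do
        echo -n .
        sleep 5
        [[ $i -ge $max ]] && { echo 'timeout waiting for cluster state' >&2; return 1; }
        let i+=1
    done

    # Under the suite's set -e these act as assertions: all PXC members
    # must be ready, and so must the proxy layer (proxysql here).
    [[ $(kubectl_bin get pxc "$cluster_name" -o 'jsonpath={.status.pxc.ready}') == "$cluster_size" ]]
    [[ $(kubectl_bin get pxc "$cluster_name" -o 'jsonpath={.status.proxysql.ready}') == "$proxy_size" ]]
    echo
}
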
.+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SfpdwLWoZz +++ mktemp ++ local LAST_ERR=/tmp/tmp.tsgD9cUMAK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SfpdwLWoZz ++ cat /tmp/tmp.tsgD9cUMAK ++ rm /tmp/tmp.SfpdwLWoZz /tmp/tmp.tsgD9cUMAK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KJ6VxzMxTO +++ mktemp ++ local LAST_ERR=/tmp/tmp.rs90EHXjal ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KJ6VxzMxTO ++ cat /tmp/tmp.rs90EHXjal ++ rm /tmp/tmp.KJ6VxzMxTO /tmp/tmp.rs90EHXjal ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fApSNtJt4W +++ mktemp ++ local LAST_ERR=/tmp/tmp.kkGBOnxl0q ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fApSNtJt4W ++ cat /tmp/tmp.kkGBOnxl0q ++ rm /tmp/tmp.fApSNtJt4W /tmp/tmp.kkGBOnxl0q ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1lq8POq7nT +++ mktemp ++ local LAST_ERR=/tmp/tmp.DBxlyhxKu6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1lq8POq7nT ++ cat /tmp/tmp.DBxlyhxKu6 ++ rm /tmp/tmp.1lq8POq7nT /tmp/tmp.DBxlyhxKu6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WplZQAw6sM +++ mktemp ++ local LAST_ERR=/tmp/tmp.cxf7yQGQ82 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WplZQAw6sM ++ cat /tmp/tmp.cxf7yQGQ82 ++ rm /tmp/tmp.WplZQAw6sM /tmp/tmp.cxf7yQGQ82 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.A39KrPPS0E +++ mktemp ++ local LAST_ERR=/tmp/tmp.3JVkdJltYi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.A39KrPPS0E ++ cat /tmp/tmp.3JVkdJltYi ++ rm /tmp/tmp.A39KrPPS0E /tmp/tmp.3JVkdJltYi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
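
Every SQL call above is preceded by the same get_client_pod / wait_pod preamble. The sed/grep pair derives a container name only for pods named like the pxc or proxysql StatefulSets; for the client pod it yields an empty string, so kubectl falls back to the pod's default container, which is what produces the interleaved 'Defaulted container "pxc-client" out of: pxc-client, backup' notices. A sketch under those assumptions; the 'condition met' lines in the log suggest kubectl wait underneath, but that is inferred:

# Resolve the single client pod by its label.
get_client_pod() {
    kubectl_bin get pods --selector=name=pxc-client \
        -o 'jsonpath={.items[].metadata.name}'
}

# Wait for a pod to pass its readiness check.
wait_pod() {
    local pod=$1
    local max_retry=${2:-480}
    local ns=${3:-}

    # For pxc-/proxysql- StatefulSet pods, target that named container;
    # for anything else (like pxc-client) this stays empty and kubectl
    # falls back to the pod's default container.
    local container
    container=$(echo "$pod" | /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' | grep -E '^(pxc|proxysql)$' || :)

    echo -n "waiting for pod/${pod} to become Ready"
    # Interpreting max_retry as seconds of waiting; the real helper may
    # loop instead of delegating the timeout to kubectl.
    kubectl wait --for=condition=Ready --timeout="${max_retry}s" "pod/${pod}" ${ns:+-n "$ns"}
    echo ".Ok"
}
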
.+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hZML6990Tq +++ mktemp ++ local LAST_ERR=/tmp/tmp.eCRqPx4ocN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hZML6990Tq ++ cat /tmp/tmp.eCRqPx4ocN ++ rm /tmp/tmp.hZML6990Tq /tmp/tmp.eCRqPx4ocN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.smvRynPCjJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.J5b05uRZ9W ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.smvRynPCjJ ++ cat /tmp/tmp.J5b05uRZ9W ++ rm /tmp/tmp.smvRynPCjJ /tmp/tmp.J5b05uRZ9W ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1jhVaAu2Xm +++ mktemp ++ local LAST_ERR=/tmp/tmp.UHdGmZRgro ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1jhVaAu2Xm ++ cat /tmp/tmp.UHdGmZRgro ++ rm /tmp/tmp.1jhVaAu2Xm /tmp/tmp.UHdGmZRgro ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.7R5I3OpKN3 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.aesqE5pIlH +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.7R5I3OpKN3 +++++ cat /tmp/tmp.aesqE5pIlH +++++ rm /tmp/tmp.7R5I3OpKN3 /tmp/tmp.aesqE5pIlH +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.FNwdzxZ9La ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.zJjJKK7VCn +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.FNwdzxZ9La +++++ cat /tmp/tmp.zJjJKK7VCn +++++ rm /tmp/tmp.FNwdzxZ9La /tmp/tmp.zJjJKK7VCn +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BZ67h9b2Bl +++ mktemp ++ local LAST_ERR=/tmp/tmp.Jbhs64PF63 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BZ67h9b2Bl ++ cat /tmp/tmp.Jbhs64PF63 ++ rm /tmp/tmp.BZ67h9b2Bl /tmp/tmp.Jbhs64PF63 ++ return 0 + [[ 2 == \2 ]] + echo + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator 
----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.BszeKUXKL4 ++ mktemp + local LAST_ERR=/tmp/tmp.KnZsO9PUIl + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BszeKUXKL4 secret/my-cluster-secrets-2 patched + cat /tmp/tmp.KnZsO9PUIl + rm /tmp/tmp.BszeKUXKL4 /tmp/tmp.KnZsO9PUIl + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.g3tiOX3tXH +++ mktemp ++ local LAST_ERR=/tmp/tmp.yE6X8Qu4Un ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.g3tiOX3tXH ++ cat /tmp/tmp.yE6X8Qu4Un ++ rm /tmp/tmp.g3tiOX3tXH /tmp/tmp.yE6X8Qu4Un ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qtNNAF1sQh +++ mktemp ++ local LAST_ERR=/tmp/tmp.ESHTaSK1GR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qtNNAF1sQh ++ cat /tmp/tmp.ESHTaSK1GR ++ rm /tmp/tmp.qtNNAF1sQh /tmp/tmp.ESHTaSK1GR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iDP5dV0Dpq +++ mktemp ++ local LAST_ERR=/tmp/tmp.2jVGGswuFl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iDP5dV0Dpq ++ cat /tmp/tmp.2jVGGswuFl ++ rm /tmp/tmp.iDP5dV0Dpq /tmp/tmp.2jVGGswuFl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lJtWNSeMD1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.8pvlANfDHZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lJtWNSeMD1 ++ cat /tmp/tmp.8pvlANfDHZ ++ rm /tmp/tmp.lJtWNSeMD1 /tmp/tmp.8pvlANfDHZ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
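
patch_secret, traced just above, is a thin wrapper over kubectl patch: base64-encode the new value (encoding, not encryption, despite the newpassencrypted variable name) and merge it into the secret's data map. Sketch following the traced statements, with the usage from this step:

# Patch one key of a Kubernetes secret with a base64-encoded value.
patch_secret() {
    local secret=$1
    local key=$2
    local value=$3
    kubectl_bin patch secret "$secret" \
        -p="{\"data\":{\"${key}\": \"${value}\"}}"
}

# Usage as in the trace: encode the plaintext first, then patch.
newpass=test-password2
newpassencrypted=$(echo -n "$newpass" | base64)
patch_secret my-cluster-secrets-2 operator "$newpassencrypted"
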
.+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rkTGFH0ZiU +++ mktemp ++ local LAST_ERR=/tmp/tmp.vnEapaKe6p ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rkTGFH0ZiU ++ cat /tmp/tmp.vnEapaKe6p ++ rm /tmp/tmp.rkTGFH0ZiU /tmp/tmp.vnEapaKe6p ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w8y9jed0ut +++ mktemp ++ local LAST_ERR=/tmp/tmp.cNxJbgRF3Q ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.w8y9jed0ut ++ cat /tmp/tmp.cNxJbgRF3Q ++ rm /tmp/tmp.w8y9jed0ut /tmp/tmp.cNxJbgRF3Q ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.RIU74s4Dcx ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.SCHjFcVS8J +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.RIU74s4Dcx +++++ cat /tmp/tmp.SCHjFcVS8J +++++ rm /tmp/tmp.RIU74s4Dcx /tmp/tmp.SCHjFcVS8J +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.5WlFcQ9zQZ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.XQsOLjwt88 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.5WlFcQ9zQZ +++++ cat /tmp/tmp.XQsOLjwt88 +++++ rm /tmp/tmp.5WlFcQ9zQZ /tmp/tmp.XQsOLjwt88 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J3cJIi1tC1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.4FH4d8lSKV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.J3cJIi1tC1 ++ cat /tmp/tmp.4FH4d8lSKV ++ rm /tmp/tmp.J3cJIi1tC1 /tmp/tmp.4FH4d8lSKV ++ return 0 + [[ 2 == \2 ]] + echo + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql ]] + 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bbgB2wAK2B +++ mktemp ++ local LAST_ERR=/tmp/tmp.tLOB3xmYQU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bbgB2wAK2B ++ cat /tmp/tmp.tLOB3xmYQU ++ rm /tmp/tmp.bbgB2wAK2B /tmp/tmp.tLOB3xmYQU ++ return 0 + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + local pod=pxc-client-59944c5bbf-pl74r + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-pl74r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-pl74r condition met waiting for pod/pxc-client-59944c5bbf-pl74r to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.AIh4ASubZF/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.AIh4ASubZF/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.uNudBKVkpL +++ mktemp ++ local LAST_ERR=/tmp/tmp.xPDZp72HDx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uNudBKVkpL ++ cat /tmp/tmp.xPDZp72HDx ++ rm /tmp/tmp.uNudBKVkpL /tmp/tmp.xPDZp72HDx ++ return 0 + newpass='-^&uS09V_=W0g@c9qk2' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''-^&uS09V_=W0g@c9qk2'\'';' '-h some-name-pxc -uroot -p'\''-^&uS09V_=W0g@c9qk2'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''-^&uS09V_=W0g@c9qk2'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''-^&uS09V_=W0g@c9qk2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wpxkWTeFyu +++ mktemp ++ local LAST_ERR=/tmp/tmp.B4btIF1OQe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wpxkWTeFyu ++ cat /tmp/tmp.B4btIF1OQe ++ rm /tmp/tmp.wpxkWTeFyu /tmp/tmp.B4btIF1OQe ++ return 0 + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + local pod=pxc-client-59944c5bbf-pl74r + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-pl74r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace 
pod/pxc-client-59944c5bbf-pl74r condition met
waiting for pod/pxc-client-59944c5bbf-pl74r to become Ready
Defaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ sleep 40
+ compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''-^&uS09V_=W0g@c9qk2'\'''
+ local command_id=select-4
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -utestsync -p'\''-^&uS09V_=W0g@c9qk2'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql ]]
+ expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql
+ run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''-^&uS09V_=W0g@c9qk2'\'''
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -utestsync -p'\''-^&uS09V_=W0g@c9qk2'\'''
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.UByFKovrwB
+++ mktemp
++ local LAST_ERR=/tmp/tmp.dq6RXvwEjI
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.UByFKovrwB
++ cat /tmp/tmp.dq6RXvwEjI
++ rm /tmp/tmp.UByFKovrwB /tmp/tmp.dq6RXvwEjI
++ return 0
+ client_pod=pxc-client-59944c5bbf-pl74r
+ wait_pod pxc-client-59944c5bbf-pl74r
+ local pod=pxc-client-59944c5bbf-pl74r
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-pl74r
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-pl74r condition met
waiting for pod/pxc-client-59944c5bbf-pl74r to become Ready
Defaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.AIh4ASubZF/select-4.sql ']'
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.AIh4ASubZF/select-4.sql
++ getSecretData internal-some-name operator
++ local secretName=internal-some-name
++ local dataKey=operator
++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.OTl2WHKbjB
+++ mktemp
++ local LAST_ERR=/tmp/tmp.PPvum8JobE
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/internal-some-name '--template={{.data.operator}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.OTl2WHKbjB
++ cat /tmp/tmp.PPvum8JobE
++ rm /tmp/tmp.OTl2WHKbjB /tmp/tmp.PPvum8JobE
++ return 0
+ pass=test-password2
+ desc 'check secret without operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
check secret without operator
-----------------------------------------------------------------------------------
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/conf/secrets.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.gwp9bBbaGO
++ mktemp
+ local LAST_ERR=/tmp/tmp.OYCpPDm5iC
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/conf/secrets.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.gwp9bBbaGO
secret/my-cluster-secrets-2 configured
+ cat /tmp/tmp.OYCpPDm5iC
Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
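
getSecretData, traced just above, is the read-side counterpart of patch_secret: extract one data key from a secret via a Go template and base64-decode it. Sketch following the traced statements:

# Read and decode a single key from a Kubernetes secret.
getSecretData() {
    local secretName=$1
    local dataKey=$2
    kubectl_bin get "secrets/${secretName}" "--template={{.data.${dataKey}}}" \
        | base64 --decode
}

# Usage as in the trace: fetch the operator password that the operator
# keeps in its internal secret.
pass=$(getSecretData internal-some-name operator)
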
+ rm /tmp/tmp.gwp9bBbaGO /tmp/tmp.OYCpPDm5iC + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hQQPWgcA4V +++ mktemp ++ local LAST_ERR=/tmp/tmp.EEw6W6csBj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hQQPWgcA4V ++ cat /tmp/tmp.EEw6W6csBj ++ rm /tmp/tmp.hQQPWgcA4V /tmp/tmp.EEw6W6csBj ++ return 0 + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + local pod=pxc-client-59944c5bbf-pl74r + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-pl74r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-pl74r condition met waiting for pod/pxc-client-59944c5bbf-pl74r to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.AIh4ASubZF/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.AIh4ASubZF/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/conf/some-name.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/conf/some-name.yml + local pvc_name= + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/conf/some-name.yml '' + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/conf/some-name.yml + local pvc_name= + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.2mwFJ5bcSM + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2234-07fcc419#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-13701~ + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' + local LAST_ERR=/tmp/tmp.o32cBViIdj + local exit_status=0 + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' ++ seq 0 2 + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2mwFJ5bcSM perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.o32cBViIdj + rm /tmp/tmp.2mwFJ5bcSM /tmp/tmp.o32cBViIdj + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sHygqpF16Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.EzLTNREtNJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sHygqpF16Y ++ cat /tmp/tmp.EzLTNREtNJ ++ rm /tmp/tmp.sHygqpF16Y /tmp/tmp.EzLTNREtNJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
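
apply_config and cat_config, traced above, pipe the CR manifest through a battery of sed substitutions that pin every image field to the images under test before handing it to kubectl apply. A trimmed sketch showing a few representative substitutions from the trace; the real helper chains several more (pmm, logcollector, backup, minio namespace, and so on):

# Rewrite a CR manifest on the fly and apply it.
apply_config() {
    local config_file=$1
    cat "$config_file" \
        | /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
        | /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' \
        | /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2234-07fcc419#' \
        | /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' \
        | /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' \
        | kubectl_bin apply -f -
}

apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/conf/some-name.yml
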
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ao9VthEkdg +++ mktemp ++ local LAST_ERR=/tmp/tmp.uumY3xqWrT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ao9VthEkdg ++ cat /tmp/tmp.uumY3xqWrT ++ rm /tmp/tmp.ao9VthEkdg /tmp/tmp.uumY3xqWrT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . ... // iterations 1 through 57 identical except for the loop counter and mktemp paths; .status.state stays "initializing" throughout ...
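-----------------------------------------------------------------------------------
note: the readiness polling pattern running above
-----------------------------------------------------------------------------------
wait_cluster_consistency is a bounded polling loop: read .status.state through a retrying kubectl wrapper, print a progress dot, and give up after max (300) five-second rounds. A sketch of the same shape under the illustrative names kubectl_retry and wait_cluster_ready; the harness's own kubectl_bin additionally captures stdout/stderr into mktemp files, and wait_cluster_consistency sleeps 7 seconds up front and goes on to compare PXC and proxy replica counts once the state is ready:

#!/bin/bash

# Retry a kubectl call up to three times (the trace's "seq 0 2" loop).
kubectl_retry() {
    local attempt
    for attempt in $(seq 0 2); do
        if kubectl "$@"; then
            return 0
        fi
        sleep 1
    done
    return 1
}

# Poll the CR's .status.state until it reads "ready" or we time out.
wait_cluster_ready() {
    local cluster=$1
    local i=0 max=300 state
    echo -n "waiting for pxc/${cluster} to be ready"
    while true; do
        state=$(kubectl_retry get pxc "$cluster" -o 'jsonpath={.status.state}')
        if [[ $state == ready ]]; then
            break
        fi
        echo -n .
        sleep 5
        if (( i >= max )); then
            echo " timed out"
            return 1
        fi
        i=$((i + 1))
    done
    echo " ok"
}

wait_cluster_ready some-name

The loop below exits on iteration 58, when the state finally reads "ready".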
.+ sleep 5 + [[ 58 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nANaiza5qN +++ mktemp ++ local LAST_ERR=/tmp/tmp.qSLfaMjDKq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nANaiza5qN ++ cat /tmp/tmp.qSLfaMjDKq ++ rm /tmp/tmp.nANaiza5qN /tmp/tmp.qSLfaMjDKq ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.P8KcGGE9kl +++ mktemp ++ local LAST_ERR=/tmp/tmp.g5WoW1TPmn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.P8KcGGE9kl ++ cat /tmp/tmp.g5WoW1TPmn ++ rm /tmp/tmp.P8KcGGE9kl /tmp/tmp.g5WoW1TPmn ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.1pmEPwWCog ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.UdRF9DStqz +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.1pmEPwWCog +++++ cat /tmp/tmp.UdRF9DStqz +++++ rm /tmp/tmp.1pmEPwWCog /tmp/tmp.UdRF9DStqz +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WfdZmfPcmj +++ mktemp ++ local LAST_ERR=/tmp/tmp.yC9NOVuiTT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WfdZmfPcmj ++ cat /tmp/tmp.yC9NOVuiTT ++ rm /tmp/tmp.WfdZmfPcmj /tmp/tmp.yC9NOVuiTT ++ return 0 + [[ 3 == \3 ]] + echo + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.L8pJCIXFGi +++ mktemp ++ local LAST_ERR=/tmp/tmp.pRkewo5NUj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.L8pJCIXFGi ++ cat /tmp/tmp.pRkewo5NUj ++ rm /tmp/tmp.L8pJCIXFGi /tmp/tmp.pRkewo5NUj ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.p4OeT7iQoE ++ mktemp + local LAST_ERR=/tmp/tmp.7nXzdH2Sio + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 
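-----------------------------------------------------------------------------------
note: rotating a system user via the users Secret
-----------------------------------------------------------------------------------
patch_secret, executing here, is the whole user-facing side of a password rotation: base64-encode the new password and merge-patch it under the user's key in the cluster's users Secret; the operator's reconciler detects the change and propagates it, as the password-rotation entries (Password changed / Password updated / Old password discarded) in the operator log further down show. A condensed sketch (variable names are illustrative; the secret name, key, and encoded value are this run's):

#!/bin/bash
set -o errexit

secret=my-cluster-secrets
user=monitor
newpass=test-password2

# -n keeps the trailing newline out of the encoded value.
encoded=$(echo -n "$newpass" | base64)
kubectl patch secret "$secret" -p="{\"data\":{\"$user\": \"$encoded\"}}"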
+ set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.p4OeT7iQoE secret/my-cluster-secrets patched + cat /tmp/tmp.7nXzdH2Sio + rm /tmp/tmp.p4OeT7iQoE /tmp/tmp.7nXzdH2Sio + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OBkCNrfzji +++ mktemp ++ local LAST_ERR=/tmp/tmp.CWKv0cfC85 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OBkCNrfzji ++ cat /tmp/tmp.CWKv0cfC85 ++ rm /tmp/tmp.OBkCNrfzji /tmp/tmp.CWKv0cfC85 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gq7nzkAFGm +++ mktemp ++ local LAST_ERR=/tmp/tmp.kCwSYTcBYE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gq7nzkAFGm ++ cat /tmp/tmp.kCwSYTcBYE ++ rm /tmp/tmp.gq7nzkAFGm /tmp/tmp.kCwSYTcBYE ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.krb35CFx7D ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.hLgGjSkxn1 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.krb35CFx7D +++++ cat /tmp/tmp.hLgGjSkxn1 +++++ rm /tmp/tmp.krb35CFx7D /tmp/tmp.hLgGjSkxn1 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zEn7lPrbCT +++ mktemp ++ local LAST_ERR=/tmp/tmp.ndqLPD4LMc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zEn7lPrbCT ++ cat /tmp/tmp.ndqLPD4LMc ++ rm /tmp/tmp.zEn7lPrbCT /tmp/tmp.ndqLPD4LMc ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-3-80.sql ]] + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HqyXhHloKW +++ mktemp ++ local LAST_ERR=/tmp/tmp.6kf9DQNBwa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HqyXhHloKW ++ cat /tmp/tmp.6kf9DQNBwa ++ rm /tmp/tmp.HqyXhHloKW /tmp/tmp.6kf9DQNBwa ++ return 0 + client_pod=pxc-client-59944c5bbf-pl74r + wait_pod pxc-client-59944c5bbf-pl74r + local pod=pxc-client-59944c5bbf-pl74r + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-pl74r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-pl74r condition met waiting for pod/pxc-client-59944c5bbf-pl74r to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.AIh4ASubZF/select-3.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-3.sql /tmp/tmp.AIh4ASubZF/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AhwGi9E5DD +++ mktemp ++ local LAST_ERR=/tmp/tmp.M5ZQAvcW1S ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AhwGi9E5DD ++ cat /tmp/tmp.M5ZQAvcW1S ++ rm /tmp/tmp.AhwGi9E5DD /tmp/tmp.M5ZQAvcW1S ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + destroy users-13701 + local namespace=users-13701 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' + grep -v 'get backup status: Job.batch' + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + sort -u + tee /tmp/tmp.AIh4ASubZF/operator.log ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.izmGd0CZxO +++ mktemp ++ local LAST_ERR=/tmp/tmp.FtHc3IkeKs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ 
exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.izmGd0CZxO ++ cat /tmp/tmp.FtHc3IkeKs ++ rm /tmp/tmp.izmGd0CZxO /tmp/tmp.FtHc3IkeKs ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-7f686bcf7c-9v5w7 ++ mktemp + local LAST_OUT=/tmp/tmp.cc8QzmaBHX ++ mktemp + local LAST_ERR=/tmp/tmp.KdOq37EyVi + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-7f686bcf7c-9v5w7 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.cc8QzmaBHX + cat /tmp/tmp.KdOq37EyVi + rm /tmp/tmp.cc8QzmaBHX /tmp/tmp.KdOq37EyVi + return 0 2025-12-02T15:16:57.670Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.14-gke.1033000"} 2025-12-02T15:16:57.671Z INFO setup Manager starting up {"gitCommit": "07fcc4192b7541a019fe8405954fe590f1f5010b", "gitBranch": "PR-2234-07fcc419", "buildTime": "2025-12-02T13:01:13Z", "goVersion": "go1.25.4", "os": "linux", "arch": "amd64"} 2025-12-02T15:16:57.674Z INFO setup Registering Components. 2025-12-02T15:16:58.740Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-12-02T15:16:58.740Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-12-02T15:16:58.740Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-12-02T15:16:58.740Z INFO controller-runtime.metrics Starting metrics server 2025-12-02T15:16:58.740Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-12-02T15:16:58.740Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-12-02T15:16:58.740Z INFO controller-runtime.webhook Starting webhook server 2025-12-02T15:16:58.740Z INFO setup Starting the Cmd. 2025-12-02T15:16:58.740Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-12-02T15:16:58.841Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com...
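-----------------------------------------------------------------------------------
note: how this operator log block was produced
-----------------------------------------------------------------------------------
The entries in this block are not the raw pod log: the destroy step above pipes kubectl logs through grep filters that drop lines matching level=info and two known-transient messages, a sed that strips volatile "ts" fields, and sort -u, which de-duplicates while the leading RFC3339 timestamps keep the survivors in rough chronological order; tee writes the result into the test's artifact directory. A sketch of that scrubbing stage, assuming the illustrative name collect_operator_log and one plausible ordering of the filters shown in the trace:

#!/bin/bash

# Scrub an operator log for the test artifacts: drop known noise,
# strip volatile fields, de-duplicate, and keep a copy on disk.
collect_operator_log() {
    local pod=$1 out=$2
    kubectl logs -n pxc-operator "$pod" \
        | grep -v level=info \
        | grep -v 'the object has been modified' \
        | grep -v 'get backup status: Job.batch' \
        | sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
        | sort -u \
        | tee "$out"
}

collect_operator_log percona-xtradb-cluster-operator-7f686bcf7c-9v5w7 /tmp/operator.log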
2025-12-02T15:16:58.875Z DEBUG events percona-xtradb-cluster-operator-7f686bcf7c-9v5w7_7ae7a0ce-ee9b-4766-a215-923f51084c8b became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"9bfaa8d7-b769-4c7d-9ad4-98c8cdb279ee","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1764688618868191009"}, "reason": "LeaderElection"} 2025-12-02T15:16:58.875Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.Secret"} 2025-12-02T15:16:58.875Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2025-12-02T15:16:58.876Z INFO Starting EventSource {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-12-02T15:16:58.876Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-12-02T15:16:58.876Z INFO Starting EventSource {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-12-02T15:16:58.976Z INFO Starting Controller {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore"} 2025-12-02T15:16:58.976Z INFO Starting workers {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "worker count": 1} 2025-12-02T15:16:59.077Z INFO Starting Controller {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup"} 2025-12-02T15:16:59.077Z INFO Starting Controller {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster"} 2025-12-02T15:16:59.077Z INFO Starting workers {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "worker count": 1} 2025-12-02T15:16:59.077Z INFO Starting workers {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "worker count": 1} 2025-12-02T15:17:35.656Z INFO Set CR version {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "f455f516-b8d9-47a9-9305-2e21c92b5c48", "version": "1.19.0"} 2025-12-02T15:17:35.782Z INFO User secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "f455f516-b8d9-47a9-9305-2e21c92b5c48", "secrets": "my-cluster-secrets"} 2025-12-02T15:17:36.006Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "f455f516-b8d9-47a9-9305-2e21c92b5c48", "object": "auto-some-name-pxc", "kind": 
"&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-12-02T15:17:36.131Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "f455f516-b8d9-47a9-9305-2e21c92b5c48", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-12-02T15:17:36.209Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "f455f516-b8d9-47a9-9305-2e21c92b5c48", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-12-02T15:17:36.293Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "f455f516-b8d9-47a9-9305-2e21c92b5c48", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-02T15:17:36.332Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "f455f516-b8d9-47a9-9305-2e21c92b5c48", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-02T15:17:36.368Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "f455f516-b8d9-47a9-9305-2e21c92b5c48", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-02T15:17:36.531Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "f455f516-b8d9-47a9-9305-2e21c92b5c48", "object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-02T15:17:37.424Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "5129b679-dc09-4116-ace8-95b96aa04364", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-12-02T15:17:37.448Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "5129b679-dc09-4116-ace8-95b96aa04364", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-12-02T15:18:53.703Z INFO Password expiration policy updated {"controller": "pxc-controller", 
"controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "243223bc-b821-4a42-9edc-ce5c03ca6761", "user": "operator"} 2025-12-02T15:18:53.739Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "243223bc-b821-4a42-9edc-ce5c03ca6761", "user": "monitor"} 2025-12-02T15:18:53.804Z INFO User monitor: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "243223bc-b821-4a42-9edc-ce5c03ca6761"} 2025-12-02T15:18:53.841Z INFO monitor user privileges granted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "243223bc-b821-4a42-9edc-ce5c03ca6761"} 2025-12-02T15:18:53.878Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "243223bc-b821-4a42-9edc-ce5c03ca6761", "user": "xtrabackup"} 2025-12-02T15:18:53.941Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "243223bc-b821-4a42-9edc-ce5c03ca6761"} 2025-12-02T15:18:54.003Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "243223bc-b821-4a42-9edc-ce5c03ca6761", "user": "replication"} 2025-12-02T15:18:54.012Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "243223bc-b821-4a42-9edc-ce5c03ca6761", "err": "get primary pxc pod: not found"} 2025-12-02T15:18:58.602Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "d61df965-757c-420a-8edd-cbfb45aa0d3f", "err": "get primary pxc pod: not found"} 2025-12-02T15:19:03.736Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": 
"7209509a-ae22-4997-ae27-40b4cdbe7f8c", "err": "get primary pxc pod: not found"} 2025-12-02T15:19:08.871Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "46bb1845-c899-4955-af81-55a0b19c5507", "err": "get primary pxc pod: not found"} 2025-12-02T15:21:19.265Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "b7153b88-56eb-4392-afd6-6775e6607a82", "user": "root"} 2025-12-02T15:21:19.376Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "b7153b88-56eb-4392-afd6-6775e6607a82", "new version": "8.0.43-34.1"} 2025-12-02T15:21:21.086Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "b7153b88-56eb-4392-afd6-6775e6607a82"} 2025-12-02T15:21:26.114Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "d9b2ec74-aef4-42dc-8719-dba13be20288"} 2025-12-02T15:21:31.805Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "4d129c96-ec27-4e2a-bf97-1da4d6122b0a"} 2025-12-02T15:21:36.981Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "e082cc27-b3c7-4c96-baba-924eddd6ae2f"} 2025-12-02T15:21:42.390Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "d6c46627-8ee9-4920-b298-08fbe08b429e"} 2025-12-02T15:21:47.681Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "ca6e496a-f05e-48e7-831f-8bc533453c3a"} 2025-12-02T15:21:53.002Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "1134cb63-5a90-4007-8de3-6476bab309ae"} 2025-12-02T15:21:58.011Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "767e190e-a7c5-4330-9088-6162665fb839"} 2025-12-02T15:22:03.602Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7d10e93c-8bfe-48b5-91f7-5f88f163f01f"} 2025-12-02T15:22:08.878Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "19452df2-f1c2-44f9-b148-17239822f0e4"} 2025-12-02T15:22:14.183Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "b24cdb38-b800-4afa-8f6b-489ddee07441"} 2025-12-02T15:22:19.485Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "f9abe4ec-12bc-4a6f-9f08-319d7c9f4623"} 2025-12-02T15:22:24.885Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "157e80ff-b4fd-4a41-809b-037ca0dca487"} 2025-12-02T15:22:30.599Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "33ce5a39-35f6-4d72-8a20-21f2e6362add"} 2025-12-02T15:22:35.485Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "a5b7279f-9661-4585-9519-6ce663cc948b"} 2025-12-02T15:22:40.565Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "0e1a2ae1-7796-47ef-81ad-45d765bc7e91"} 2025-12-02T15:22:45.994Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "3e0940db-1613-456d-9fa1-4c648506fea3"} 2025-12-02T15:22:51.382Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "62b4429a-df04-44fc-85e7-63cef7fca0b8"} 2025-12-02T15:22:56.682Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "9386c391-9812-4428-9644-db9c423e9f55"} 2025-12-02T15:23:01.155Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "dfba54a7-f165-4f60-9948-db36f3810860", "user": "root"} 2025-12-02T15:23:01.185Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "dfba54a7-f165-4f60-9948-db36f3810860", "user": "root"} 2025-12-02T15:23:01.206Z INFO MySQL init secret created {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "dfba54a7-f165-4f60-9948-db36f3810860", "secret": "some-name-mysql-init", "user": "root"} 2025-12-02T15:23:02.492Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fcc14262-2e56-4817-af2a-3db033951c77"} 2025-12-02T15:23:03.278Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "dfba54a7-f165-4f60-9948-db36f3810860"} 2025-12-02T15:23:03.305Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "dfba54a7-f165-4f60-9948-db36f3810860", "user": "root"} 2025-12-02T15:23:03.326Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "dfba54a7-f165-4f60-9948-db36f3810860", "user": "root"} 2025-12-02T15:23:04.894Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": 
"PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "dfba54a7-f165-4f60-9948-db36f3810860"} 2025-12-02T15:23:07.485Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "dc36c977-b971-4811-b4cd-a1b340f2abc8"} 2025-12-02T15:23:12.584Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fc0b7b29-75a1-4370-8043-71e4ce89e54f"} 2025-12-02T15:23:18.078Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "3ccfb31d-d0de-40a7-9834-fe0ea542139b"} 2025-12-02T15:23:22.516Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "2718225a-e5ef-4cc8-b182-5d4461e05d9c", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-02T15:23:22.587Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "2718225a-e5ef-4cc8-b182-5d4461e05d9c", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-02T15:23:23.306Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "1ee37be7-4463-4b5f-a7eb-ad9868da42bb"} 2025-12-02T15:23:43.208Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "71b9a29f-c8e7-42de-aec0-61401cb74022", "err": "get primary pxc pod: not found"} 2025-12-02T15:23:43.965Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "5a1985f9-26ed-43bf-bafc-38d7722e41f0", "err": "get primary pxc pod: not found"} 2025-12-02T15:23:48.044Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, 
"namespace": "users-13701", "name": "some-name", "reconcileID": "5a1985f9-26ed-43bf-bafc-38d7722e41f0", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-02T15:23:48.952Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "52e9f40b-3cb4-4d61-aa9f-b6ab547e09ae", "err": "get primary pxc pod: not found"} 2025-12-02T15:23:49.845Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "a40876b3-ad9d-4fa9-ab15-458c72b21e94", "user": "proxyadmin"} 2025-12-02T15:23:49.845Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "a40876b3-ad9d-4fa9-ab15-458c72b21e94", "user": "proxyadmin"} 2025-12-02T15:23:49.879Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "a40876b3-ad9d-4fa9-ab15-458c72b21e94", "user": "proxyadmin"} 2025-12-02T15:23:49.904Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", 
"name": "some-name", "reconcileID": "a40876b3-ad9d-4fa9-ab15-458c72b21e94", "user": "proxyadmin"} 2025-12-02T15:23:49.904Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "a40876b3-ad9d-4fa9-ab15-458c72b21e94", "last-applied-secret": "e9e51fc7d41db72e6f4e75c7f8e02cdd3c23f91bc42391d57ddac8fc9ef7a8bb"} 2025-12-02T15:23:49.908Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "a40876b3-ad9d-4fa9-ab15-458c72b21e94", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-02T15:23:50.006Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "a40876b3-ad9d-4fa9-ab15-458c72b21e94", "err": "get primary pxc pod: not found"} 2025-12-02T15:23:51.595Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "52e9f40b-3cb4-4d61-aa9f-b6ab547e09ae", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-02T15:24:34.903Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fe8805fe-ce29-4892-bdbd-be29dcc5f5fa"} 2025-12-02T15:24:39.793Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "5b605e08-3661-40f3-915c-fa116bfa70da", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-02T15:24:39.841Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "5b605e08-3661-40f3-915c-fa116bfa70da", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-02T15:24:39.934Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "acba8b42-6744-45a3-aa3c-511845bb820e"} 2025-12-02T15:24:41.502Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": 
"PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "86383070-67b6-4434-952d-f1466724e0fc", "user": "xtrabackup"} 2025-12-02T15:24:41.520Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "86383070-67b6-4434-952d-f1466724e0fc", "user": "xtrabackup"} 2025-12-02T15:24:41.545Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "86383070-67b6-4434-952d-f1466724e0fc", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-12-02T15:24:41.571Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "86383070-67b6-4434-952d-f1466724e0fc", "user": "xtrabackup"} 2025-12-02T15:24:41.585Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "86383070-67b6-4434-952d-f1466724e0fc", "user": "xtrabackup"} 2025-12-02T15:24:41.592Z INFO PXC pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "86383070-67b6-4434-952d-f1466724e0fc", "last-applied-secret": "94cf10beaee1e7590d3ff3944f274f110969dd1d84489b841e505bcd52db9854"} 2025-12-02T15:24:41.594Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "86383070-67b6-4434-952d-f1466724e0fc", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-02T15:24:44.011Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "86383070-67b6-4434-952d-f1466724e0fc"} 2025-12-02T15:25:34.776Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "4e6650a5-217e-4c92-98b3-32f143311f10", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-13701 on 34.118.224.10:53: no such host"} 2025-12-02T15:25:38.948Z INFO reconcile replication error 
{"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "184dcdc5-69ff-4497-8e81-ed3f3b2d8f57", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-13701 on 34.118.224.10:53: no such host"} 2025-12-02T15:25:45.041Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "a4b96ba7-b790-4ba9-a789-2d1c5887cf22", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-13701 on 34.118.224.10:53: no such host"} 2025-12-02T15:26:22.283Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "1d4968d3-738f-475b-86a5-3a489bbcd511", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-13701 on 34.118.224.10:53: no such host"} 2025-12-02T15:26:22.573Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "6b29391a-e459-43cb-ba34-8e96275a8a68", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-13701 on 34.118.224.10:53: no such host"} 2025-12-02T15:26:28.316Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "b4b7168d-c759-4627-937a-8aed9851d838", "err": "failed to connect to pod some-name-pxc-0: invalid connection"} 2025-12-02T15:26:33.480Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "9a2f5318-f504-49d2-9f8b-9181224c5208", "primary name": "some-name-pxc-0.some-name-pxc.users-13701.svc.cluster.local"} 2025-12-02T15:26:38.596Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "d969fc54-c430-45a6-929d-537ea2ff8773", "primary name": "some-name-pxc-0.some-name-pxc.users-13701.svc.cluster.local"} 2025-12-02T15:26:43.707Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "c063f215-9f09-464d-9710-b02e96395ea0", "primary name": "some-name-pxc-0.some-name-pxc.users-13701.svc.cluster.local"} 2025-12-02T15:26:48.828Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "46de7dd0-d924-4911-a950-6d79804dc77a", "primary name": "some-name-pxc-0.some-name-pxc.users-13701.svc.cluster.local"} 2025-12-02T15:26:53.967Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "89951f56-d3e1-42c1-a918-f4f8a6714139", "primary name": "some-name-pxc-0.some-name-pxc.users-13701.svc.cluster.local"} 2025-12-02T15:26:59.112Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "c9fd3d30-b529-4097-a7f9-da58fc1b2120", "primary name": "some-name-pxc-0.some-name-pxc.users-13701.svc.cluster.local"} 2025-12-02T15:27:04.242Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "41d9e3a5-b45b-4015-a566-79b5e84bf0e0", "primary name": "some-name-pxc-0.some-name-pxc.users-13701.svc.cluster.local"} 2025-12-02T15:27:12.963Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "945552a8-f9c4-4129-9b97-0bedd0d4d029"} 2025-12-02T15:27:16.526Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "63a0b13e-07c3-48d3-a045-32b176a8a9dc"} 2025-12-02T15:27:18.138Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "58c9d29e-e717-46fa-a85b-30bf315db0ea", "user": "monitor"} 2025-12-02T15:27:18.154Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "58c9d29e-e717-46fa-a85b-30bf315db0ea", "user": "monitor"} 2025-12-02T15:27:18.170Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "58c9d29e-e717-46fa-a85b-30bf315db0ea", "secret": "some-name-mysql-init", "user": "monitor"} 2025-12-02T15:27:18.192Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "58c9d29e-e717-46fa-a85b-30bf315db0ea", "user": "monitor"} 2025-12-02T15:27:18.217Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "58c9d29e-e717-46fa-a85b-30bf315db0ea", "user": "monitor"} 2025-12-02T15:27:18.498Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "58c9d29e-e717-46fa-a85b-30bf315db0ea", "last-applied-secret": "8d0edb586f5281d46aa3999f9bcf11f2c4662abde7ba179f95d9d22c69cf5fbe"} 2025-12-02T15:27:18.502Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", 
"PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "58c9d29e-e717-46fa-a85b-30bf315db0ea", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-02T15:27:20.969Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "58c9d29e-e717-46fa-a85b-30bf315db0ea", "error": "syncusers: ERROR 1045 (28000) at line 1: ProxySQL Admin Error: UNIQUE constraint failed: mysql_users.username, mysql_users.frontend\nERROR (line:718) : Failed to add the user (monitor) from PXC to ProxySQL database. \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "syncusers: ERROR 1045 (28000) at line 1: ProxySQL Admin Error: UNIQUE constraint failed: mysql_users.username, mysql_users.frontend\nERROR (line:718) : Failed to add the user (monitor) from PXC to ProxySQL database. \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:979\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-02T15:28:04.980Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "eb4721c5-3d92-452d-8f89-a8ea2107b5cd", "user": "monitor"} 2025-12-02T15:28:06.831Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "eb4721c5-3d92-452d-8f89-a8ea2107b5cd"} 2025-12-02T15:28:10.009Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "878758dd-e965-4d7f-b037-5ea1b5c7c45c", "user": "monitor"} 2025-12-02T15:28:10.656Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "878758dd-e965-4d7f-b037-5ea1b5c7c45c", "user": "monitor"} 2025-12-02T15:28:10.668Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": 
"878758dd-e965-4d7f-b037-5ea1b5c7c45c", "last-applied-secret": "8d0edb586f5281d46aa3999f9bcf11f2c4662abde7ba179f95d9d22c69cf5fbe"} 2025-12-02T15:28:12.437Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "878758dd-e965-4d7f-b037-5ea1b5c7c45c"} 2025-12-02T15:28:17.450Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "8d675cbf-1dd3-4f59-aacd-fd6abcfa85cc"} 2025-12-02T15:28:22.951Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "caed9bec-0b9a-4b38-8f5b-3ba760e62134"} 2025-12-02T15:28:28.117Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "0cd0a694-7dee-468f-bad4-6c78c7894c90"} 2025-12-02T15:28:33.555Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "cf9b63ee-8a73-4ceb-a201-ecc30fb55fac"} 2025-12-02T15:28:37.647Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "239d4e17-3910-445b-a119-ccc0c0a07744", "user": "operator"} 2025-12-02T15:28:37.664Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "239d4e17-3910-445b-a119-ccc0c0a07744", "user": "operator"} 2025-12-02T15:28:37.683Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "239d4e17-3910-445b-a119-ccc0c0a07744", "secret": "some-name-mysql-init", "user": "operator"} 2025-12-02T15:28:37.715Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "239d4e17-3910-445b-a119-ccc0c0a07744", "user": "operator"} 2025-12-02T15:28:37.731Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", 
"PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "239d4e17-3910-445b-a119-ccc0c0a07744", "user": "operator"} 2025-12-02T15:28:37.755Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "239d4e17-3910-445b-a119-ccc0c0a07744", "last-applied-secret": "6ca20dab9936cfdaa5158604bcb6e8abc3578831dc11cf18e4295568e4231c0c"} 2025-12-02T15:28:37.758Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "239d4e17-3910-445b-a119-ccc0c0a07744", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-02T15:28:40.399Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "1d97a244-fa95-4ecd-91ae-a040301ab177", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-13701.svc.cluster.local:3306) to ProxySQL\n / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' (using password: YES)\nERROR (line:359) : Server connection check failed. \n-- Could not connect to the server at some-name-pxc-0.some-name-pxc.users-13701.svc.cluster.local:3306 \n-- Please check the connection parameters and status.\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' 
(using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-13701.svc.cluster.local:3306) to ProxySQL\n / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' (using password: YES)\nERROR (line:359) : Server connection check failed. \n-- Could not connect to the server at some-name-pxc-0.some-name-pxc.users-13701.svc.cluster.local:3306 \n-- Please check the connection parameters and status.\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13701.svc.' 
(using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-02T15:29:18.981Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "afbc81fc-2e79-4e38-84eb-19650d8018fc"} 2025-12-02T15:29:23.801Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "cfe8d349-c9cb-4d9e-8bfe-ac3e57c9f1d0"} 2025-12-02T15:29:28.781Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "cda23dbd-cbd6-4d28-baa5-5982164cc0a5"} 2025-12-02T15:29:32.626Z INFO Created user secrets {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "secrets": "my-cluster-secrets-2"} 2025-12-02T15:29:32.635Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "user": "root"} 2025-12-02T15:29:32.661Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "user": "root"} 2025-12-02T15:29:32.681Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "secret": "some-name-mysql-init", "user": "root"} 2025-12-02T15:29:36.249Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "91a34442-78df-4399-b78a-c6bda98436bd"} 2025-12-02T15:29:36.323Z DEBUG PXC users synced with 
ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f"} 2025-12-02T15:29:36.344Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "user": "root"} 2025-12-02T15:29:36.366Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "user": "root"} 2025-12-02T15:29:36.372Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "user": "operator"} 2025-12-02T15:29:36.387Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "user": "operator"} 2025-12-02T15:29:36.408Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "secret": "some-name-mysql-init", "user": "operator"} 2025-12-02T15:29:36.438Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "user": "operator"} 2025-12-02T15:29:36.452Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "user": "operator"} 2025-12-02T15:29:36.460Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "user": "monitor"} 2025-12-02T15:29:36.476Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": 
"7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "user": "monitor"} 2025-12-02T15:29:36.519Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "secret": "some-name-mysql-init", "user": "monitor"} 2025-12-02T15:29:36.539Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "user": "monitor"} 2025-12-02T15:29:36.563Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "user": "monitor"} 2025-12-02T15:29:36.852Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "user": "xtrabackup"} 2025-12-02T15:29:36.869Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "user": "xtrabackup"} 2025-12-02T15:29:36.892Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-12-02T15:29:36.919Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "user": "xtrabackup"} 2025-12-02T15:29:36.932Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "user": "xtrabackup"} 2025-12-02T15:29:36.937Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "user": "replication"} 2025-12-02T15:29:36.953Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": 
"PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "user": "replication"} 2025-12-02T15:29:36.968Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "secret": "some-name-mysql-init", "user": "replication"} 2025-12-02T15:29:36.992Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "user": "replication"} 2025-12-02T15:29:37.006Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "user": "replication"} 2025-12-02T15:29:37.006Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "user": "proxyadmin"} 2025-12-02T15:29:37.028Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "user": "proxyadmin"} 2025-12-02T15:29:37.051Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "user": "proxyadmin"} 2025-12-02T15:29:37.051Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "last-applied-secret": "b3efb95f512a0f3d857d58f10cd2d7ce6e4fddeda82a217e2c46b4c227f3b9ee"} 2025-12-02T15:29:37.051Z INFO PXC pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "last-applied-secret": "b3efb95f512a0f3d857d58f10cd2d7ce6e4fddeda82a217e2c46b4c227f3b9ee"} 2025-12-02T15:29:37.054Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-02T15:29:37.120Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-02T15:29:38.694Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "7105340d-a9f9-41c2-961a-6a3ae72e1d2f", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-12-02T15:30:30.647Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "735d4662-b322-4417-b97f-91168ba0ab39", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-13701 on 34.118.224.10:53: no such host"}
2025-12-02T15:30:35.579Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "669f48db-25f0-4acb-b2d8-43b3d6073188", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-13701 on 34.118.224.10:53: no such host"}
2025-12-02T15:30:40.880Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "a1185524-0f67-4694-bdf0-265ac575600a", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-13701 on 34.118.224.10:53: no such host"}
2025-12-02T15:31:33.249Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "968ee35c-390f-4d42-8f37-9a0c7e605ea5", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-13701 on 34.118.224.10:53: no such host"}
2025-12-02T15:31:38.397Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "1542a303-5777-440b-a6b0-edf9959de3fd", "primary name": "some-name-pxc-0.some-name-pxc.users-13701.svc.cluster.local"}
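The ERROR 1045 burst above is the syncusers helper inside the proxysql container being rejected on the ProxySQL admin port (6032) after the proxyadmin password rotation, while pods are still running with the previous credential. A minimal manual check along the same lines — a diagnostic sketch, not part of the test; it assumes the rotated password has already landed in the internal-some-name secret that the log shows feeding PROXY_ADMIN_PASSWORD:

  # read the current proxyadmin password from the operator's internal secret
  PROXYADMIN_PASS=$(kubectl -n users-13701 get secret internal-some-name -o jsonpath='{.data.proxyadmin}' | base64 -d)
  # try the ProxySQL admin interface with it, the same way syncusers does
  kubectl -n users-13701 exec some-name-proxysql-0 -c proxysql -- mysql -h127.0.0.1 -P6032 -uproxyadmin -p"$PROXYADMIN_PASS" -e 'SELECT 1;'

An Access denied here during the rolling restart is expected and transient; the operator retries the sync on the next reconcile, which is why the same error repeats and then clears once the proxysql statefulset converges.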
2025-12-02T15:31:43.515Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "1045b981-d1fd-48b6-919e-cd2a745aff90", "primary name": "some-name-pxc-0.some-name-pxc.users-13701.svc.cluster.local"}
2025-12-02T15:31:48.646Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fcb2ad73-e3fa-44c4-9daf-8408d970a2d2", "primary name": "some-name-pxc-0.some-name-pxc.users-13701.svc.cluster.local"}
2025-12-02T15:31:53.775Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "d02a1734-955d-4d4d-af08-a4db451ad0ba", "primary name": "some-name-pxc-0.some-name-pxc.users-13701.svc.cluster.local"}
2025-12-02T15:31:58.906Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fe5e6667-bbac-41be-a060-c404661a03f6", "primary name": "some-name-pxc-0.some-name-pxc.users-13701.svc.cluster.local"}
2025-12-02T15:32:04.035Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "9d8986a7-c86f-45c7-b4c2-60e058991739", "primary name": "some-name-pxc-0.some-name-pxc.users-13701.svc.cluster.local"}
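The repeated "no such host" lookups and the "Unable to find primary pod" entries above target per-pod DNS records of the headless some-name-pxc service, which disappear while the statefulset recreates pods after the restart. The lookup the operator performs can be reproduced from inside the cluster — an illustrative sketch only; the throwaway busybox pod is not part of the test:

  kubectl -n users-13701 run dns-probe --rm -i --restart=Never --image=busybox -- nslookup some-name-pxc-0.some-name-pxc.users-13701.svc.cluster.local

An NXDOMAIN result here simply means the pod's record is gone at that moment; the reconcile errors stop on their own once some-name-pxc-0 is back and ready.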
2025-12-02T15:32:09.973Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "d2584ce6-2977-42e2-a2a6-426d1e9fb3ec", "user": "monitor"}
2025-12-02T15:32:10.824Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "d2584ce6-2977-42e2-a2a6-426d1e9fb3ec", "user": "monitor"}
2025-12-02T15:32:10.840Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "d2584ce6-2977-42e2-a2a6-426d1e9fb3ec", "last-applied-secret": "b3efb95f512a0f3d857d58f10cd2d7ce6e4fddeda82a217e2c46b4c227f3b9ee"}
2025-12-02T15:32:13.536Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "258c4f50-d419-415f-985d-6b5f6ea1ef1d", "user": "operator"}
2025-12-02T15:32:13.553Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "258c4f50-d419-415f-985d-6b5f6ea1ef1d", "user": "operator"}
2025-12-02T15:32:13.578Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "258c4f50-d419-415f-985d-6b5f6ea1ef1d", "secret": "some-name-mysql-init", "user": "operator"}
2025-12-02T15:32:13.599Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "258c4f50-d419-415f-985d-6b5f6ea1ef1d", "user": "operator"}
2025-12-02T15:32:13.617Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "258c4f50-d419-415f-985d-6b5f6ea1ef1d", "user": "operator"}
2025-12-02T15:32:13.640Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup":
"pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "258c4f50-d419-415f-985d-6b5f6ea1ef1d", "last-applied-secret": "309f19f524173f72bebdb91d5c26e3f848b36451897136b286e05fced2f1326d"} 2025-12-02T15:32:13.645Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "258c4f50-d419-415f-985d-6b5f6ea1ef1d", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-02T15:32:15.512Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "d2584ce6-2977-42e2-a2a6-426d1e9fb3ec", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-13701.svc.cluster.local:3306) to ProxySQL\nRemoving existing user from ProxySQL: replication\nAdding user to ProxySQL: replication\n Added query rule for user: replication\nRemoving existing user from ProxySQL: operator\nAdding user to ProxySQL: operator\n Added query rule for user: operator\nRemoving existing user from ProxySQL: xtrabackup\n / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-13701.svc.cluster.local:3306) to ProxySQL\nRemoving existing user from ProxySQL: replication\nAdding user to ProxySQL: replication\n Added query rule for user: replication\nRemoving existing user from ProxySQL: operator\nAdding user to ProxySQL: operator\n Added query rule for user: operator\nRemoving existing user from ProxySQL: xtrabackup\n / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-02T15:33:00.561Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fd8adec3-361f-41fa-823b-9622c4d6fe0b"} 2025-12-02T15:33:04.007Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "98f5b586-5551-43bf-bbaf-6e9baabf3e62"} 2025-12-02T15:33:09.515Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", 
"controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "cb65d846-17aa-45fa-a56b-ee8c2d3f8833"} 2025-12-02T15:33:14.728Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "f87d71fb-ede3-4dab-835e-1ba47e79c3cf"} 2025-12-02T15:33:20.109Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "02a97cbb-b5cb-4a28-8752-8ad3e6d3f207"} 2025-12-02T15:33:25.510Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "cb3df365-0bed-436c-b02c-28a36e6d63a1"} 2025-12-02T15:33:31.248Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "4ec7c343-2b38-4c77-9f08-5d2d453c8c74"} 2025-12-02T15:33:35.835Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "59df6cad-5adc-4e2e-8afc-7d9c557e1771"} 2025-12-02T15:33:41.429Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "8ab6541c-cebb-4841-a895-b5b21a4cd74b"} 2025-12-02T15:33:46.897Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "27f395a4-0b90-4bfc-a782-b60504a6e0cc"} 2025-12-02T15:33:52.215Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "5286856e-4fb0-41cd-af49-5815d6ab9be7"} 2025-12-02T15:33:57.292Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "2e923509-eb6d-4ade-bf44-57a99b556541"} 2025-12-02T15:34:02.701Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", 
"controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "981fa8e1-3b64-454c-8a2f-b21ccff87b82"} 2025-12-02T15:34:07.894Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "d07b1e9e-54f9-41c7-b57a-b16f1c9ffa22"} 2025-12-02T15:34:11.820Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fb3182ce-643a-442d-b130-b03b3383221a", "user": "root"} 2025-12-02T15:34:11.845Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fb3182ce-643a-442d-b130-b03b3383221a", "user": "root"} 2025-12-02T15:34:11.866Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fb3182ce-643a-442d-b130-b03b3383221a", "secret": "some-name-mysql-init", "user": "root"} 2025-12-02T15:34:14.576Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "c75eda89-9c39-4876-a09d-28ad4e9f4bec"} 2025-12-02T15:34:14.999Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fb3182ce-643a-442d-b130-b03b3383221a"} 2025-12-02T15:34:15.039Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fb3182ce-643a-442d-b130-b03b3383221a", "user": "root"} 2025-12-02T15:34:15.063Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fb3182ce-643a-442d-b130-b03b3383221a", "user": "root"} 2025-12-02T15:34:15.077Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fb3182ce-643a-442d-b130-b03b3383221a", "user": "monitor"} 2025-12-02T15:34:15.092Z INFO Password updated {"controller": "pxc-controller", 
"controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fb3182ce-643a-442d-b130-b03b3383221a", "user": "monitor"} 2025-12-02T15:34:15.148Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fb3182ce-643a-442d-b130-b03b3383221a", "secret": "some-name-mysql-init", "user": "monitor"} 2025-12-02T15:34:15.167Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fb3182ce-643a-442d-b130-b03b3383221a", "user": "monitor"} 2025-12-02T15:34:15.221Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fb3182ce-643a-442d-b130-b03b3383221a", "user": "monitor"} 2025-12-02T15:34:15.504Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fb3182ce-643a-442d-b130-b03b3383221a", "user": "xtrabackup"} 2025-12-02T15:34:15.520Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fb3182ce-643a-442d-b130-b03b3383221a", "user": "xtrabackup"} 2025-12-02T15:34:15.554Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fb3182ce-643a-442d-b130-b03b3383221a", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-12-02T15:34:15.596Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fb3182ce-643a-442d-b130-b03b3383221a", "user": "xtrabackup"} 2025-12-02T15:34:15.609Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fb3182ce-643a-442d-b130-b03b3383221a", "user": "xtrabackup"} 2025-12-02T15:34:15.617Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", 
"reconcileID": "fb3182ce-643a-442d-b130-b03b3383221a", "user": "proxyadmin"} 2025-12-02T15:34:15.638Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fb3182ce-643a-442d-b130-b03b3383221a", "user": "proxyadmin"} 2025-12-02T15:34:15.698Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fb3182ce-643a-442d-b130-b03b3383221a", "user": "proxyadmin"} 2025-12-02T15:34:15.698Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fb3182ce-643a-442d-b130-b03b3383221a", "last-applied-secret": "810871cf2d60f891b7bab2132d291bf614eaafcc4e48af38e28133d921af67f5"} 2025-12-02T15:34:15.698Z INFO PXC pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fb3182ce-643a-442d-b130-b03b3383221a", "last-applied-secret": "810871cf2d60f891b7bab2132d291bf614eaafcc4e48af38e28133d921af67f5"} 2025-12-02T15:34:15.701Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fb3182ce-643a-442d-b130-b03b3383221a", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-02T15:34:15.875Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fb3182ce-643a-442d-b130-b03b3383221a", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-02T15:34:17.867Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "fb3182ce-643a-442d-b130-b03b3383221a", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
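The rotation sequences above (root, monitor, xtrabackup, proxyadmin under the same reconcileID) all follow the same pattern: Password changed, updating user → Password updated → MySQL init secret updated → Internal secrets updated → Old password discarded, with "Password updated but old one not discarded" logged whenever the discard has to wait for pods to catch up. This is consistent with MySQL 8.0 dual-password rotation. Roughly, in SQL terms — an illustrative sketch, not the operator's literal code; the '%' host pattern and the shell variables are placeholders:

  # keep the old password valid while pods still use it
  kubectl -n users-13701 exec some-name-pxc-0 -c pxc -- mysql -uroot -p"$ROOT_PASS" -e "ALTER USER 'monitor'@'%' IDENTIFIED BY '$NEW_PASS' RETAIN CURRENT PASSWORD;"
  # once everything has re-read the secret ("Old password discarded")
  kubectl -n users-13701 exec some-name-pxc-0 -c pxc -- mysql -uroot -p"$ROOT_PASS" -e "ALTER USER 'monitor'@'%' DISCARD OLD PASSWORD;"

The first statement is why both old and new credentials work mid-rotation; the second corresponds to the "Old password discarded" entries once the rollout settles.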
2025-12-02T15:34:31.736Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "91316270-fe84-4aa8-a51a-1d73f1966159", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-12-02T15:34:31.813Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "91316270-fe84-4aa8-a51a-1d73f1966159", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"}
2025-12-02T15:34:31.926Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "91316270-fe84-4aa8-a51a-1d73f1966159", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"}
2025-12-02T15:34:32.032Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "91316270-fe84-4aa8-a51a-1d73f1966159", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
2025-12-02T15:34:32.132Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "91316270-fe84-4aa8-a51a-1d73f1966159", "object": "some-name-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
2025-12-02T15:34:32.798Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "493a276c-c502-4788-a5c7-0de47ea51966", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"}
2025-12-02T15:37:41.581Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID":
"888ef3d7-90b0-490f-94b8-5d36197c2e4a", "user": "root"} 2025-12-02T15:37:41.605Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "888ef3d7-90b0-490f-94b8-5d36197c2e4a", "user": "root"} 2025-12-02T15:37:41.629Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "888ef3d7-90b0-490f-94b8-5d36197c2e4a", "secret": "some-name-mysql-init", "user": "root"} 2025-12-02T15:37:41.662Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "888ef3d7-90b0-490f-94b8-5d36197c2e4a", "user": "root"} 2025-12-02T15:37:41.683Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "888ef3d7-90b0-490f-94b8-5d36197c2e4a", "user": "root"} 2025-12-02T15:37:41.689Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "888ef3d7-90b0-490f-94b8-5d36197c2e4a", "user": "operator"} 2025-12-02T15:37:41.707Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "888ef3d7-90b0-490f-94b8-5d36197c2e4a", "user": "operator"} 2025-12-02T15:37:41.730Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "888ef3d7-90b0-490f-94b8-5d36197c2e4a", "secret": "some-name-mysql-init", "user": "operator"} 2025-12-02T15:37:41.759Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "888ef3d7-90b0-490f-94b8-5d36197c2e4a", "user": "operator"} 2025-12-02T15:37:41.777Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "888ef3d7-90b0-490f-94b8-5d36197c2e4a", "user": "operator"} 2025-12-02T15:37:41.785Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", 
"PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "888ef3d7-90b0-490f-94b8-5d36197c2e4a", "user": "monitor"} 2025-12-02T15:37:41.799Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "888ef3d7-90b0-490f-94b8-5d36197c2e4a", "user": "monitor"} 2025-12-02T15:37:41.820Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "888ef3d7-90b0-490f-94b8-5d36197c2e4a", "secret": "some-name-mysql-init", "user": "monitor"} 2025-12-02T15:37:41.839Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "888ef3d7-90b0-490f-94b8-5d36197c2e4a", "user": "monitor"} 2025-12-02T15:37:42.139Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "888ef3d7-90b0-490f-94b8-5d36197c2e4a", "user": "xtrabackup"} 2025-12-02T15:37:42.154Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "888ef3d7-90b0-490f-94b8-5d36197c2e4a", "user": "xtrabackup"} 2025-12-02T15:37:42.199Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "888ef3d7-90b0-490f-94b8-5d36197c2e4a", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-12-02T15:37:42.218Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "888ef3d7-90b0-490f-94b8-5d36197c2e4a", "user": "xtrabackup"} 2025-12-02T15:37:42.233Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "888ef3d7-90b0-490f-94b8-5d36197c2e4a", "user": "xtrabackup"} 2025-12-02T15:37:42.239Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "888ef3d7-90b0-490f-94b8-5d36197c2e4a", "user": "replication"} 
2025-12-02T15:37:42.254Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "888ef3d7-90b0-490f-94b8-5d36197c2e4a", "user": "replication"}
2025-12-02T15:37:42.271Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "888ef3d7-90b0-490f-94b8-5d36197c2e4a", "secret": "some-name-mysql-init", "user": "replication"}
2025-12-02T15:37:42.297Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "888ef3d7-90b0-490f-94b8-5d36197c2e4a", "user": "replication"}
2025-12-02T15:37:42.312Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "888ef3d7-90b0-490f-94b8-5d36197c2e4a", "user": "replication"}
2025-12-02T15:37:42.312Z INFO PXC pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "888ef3d7-90b0-490f-94b8-5d36197c2e4a", "last-applied-secret": "6ca20dab9936cfdaa5158604bcb6e8abc3578831dc11cf18e4295568e4231c0c"}
2025-12-02T15:37:42.314Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "888ef3d7-90b0-490f-94b8-5d36197c2e4a", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-12-02T15:37:43.290Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "468a4c8a-8957-41f1-bc86-e1058b299f35", "user": "monitor"}
2025-12-02T15:38:38.395Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "33360499-0f93-43ac-9d32-246516698600", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.202.113.62:33062: connect: connection refused"}
2025-12-02T15:40:13.937Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "63d130a3-3538-4e4c-9905-d197e04a2ac2", "user": "monitor"}
2025-12-02T15:40:15.169Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "63d130a3-3538-4e4c-9905-d197e04a2ac2", "user": "monitor"}
2025-12-02T15:40:20.503Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "d04949fe-c462-40d7-b603-64b158c831e6", "user": "monitor"}
2025-12-02T15:40:20.520Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "d04949fe-c462-40d7-b603-64b158c831e6", "user": "monitor"}
2025-12-02T15:40:20.535Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "d04949fe-c462-40d7-b603-64b158c831e6", "secret": "some-name-mysql-init", "user": "monitor"}
2025-12-02T15:40:20.560Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "d04949fe-c462-40d7-b603-64b158c831e6", "user": "monitor"}
2025-12-02T15:40:24.799Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "1226d776-30c8-4d94-827e-b1e9b9591fe4", "user": "monitor"}
2025-12-02T15:40:30.759Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "b820f50c-2ac0-4203-8046-db60fd18c705", "user": "monitor"}
2025-12-02T15:40:36.309Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "1863dbbd-c5f9-490b-8dbe-a76ca7dc7c13", "user": "monitor"}
2025-12-02T15:40:41.878Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "d1d60497-b253-4a59-846f-3f9360a73d2b", "user": "monitor"}
2025-12-02T15:40:47.537Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-13701"}, "namespace": "users-13701", "name": "some-name", "reconcileID": "b1f8b580-dc76-4daa-aa87-342ee0166474", "user": "monitor"}
... // 22 identical fields
... // 2 identical fields
... // 2 identical fields
... // 2 identical fields
- "309f19f524173f72bebdb91d5c26e3f848b36451897136b286e05fced2f1326d",
+ "309f19f524173f72bebdb91d5c26e3f848b36451897136b286e05fced2f1326d",
+ "3efb95f512a0f3d857d58f10cd2d7ce6e4fddeda82a217e2c46b4c227f3b9ee",
... // 3 identical elements
... // 3 identical fields
... // 3 identical fields
... // 3 identical fields
... // 4 identical fields
... // 5 identical elements
... // 5 identical fields
... // 5 identical fields
... // 5 identical fields
- "6ca20da",
+ "6ca20dab9936cfdaa5158604bcb6e8abc3578831dc11cf18e4295568e4231c0c",
... // 6 identical fields
... // 6 identical fields
... // 7 identical fields
- "810871cf2d60f891b7bab2132d291bf614eaafcc4e48af38e28133d921af67f5",
+ "810871cf2d60f891b7bab2132d291bf614eaafcc4e48af38e28133d921af67f5",
- "8d0edb586f5281d46aa3999f9bcf11f2c4662abde7ba179f95d9d22c69cf5fbe",
+ "8d0edb586f5281d46aa3999f9bcf11f2c4662abde7ba179f95d9d22c69cf5fbe",
... // 8 identical fields
- "94cf10beaee1e7590d3ff3944f274f110969dd1d84489b841e505bcd52db9854",
- "9936cfdaa5158604bcb6e8abc3578831dc11cf18e4295568e4231c0c",
... // 9 identical fields
... // 9 identical fields
AccessModes: nil,
ActiveDeadlineSeconds: nil,
Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},
Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},
Annotations: map[string]string{
- Annotations: map[string]string{
+ Annotations: map[string]string{
+ APIVersion: "",
- APIVersion: "apps/v1",
- APIVersion: "apps/v1",
- APIVersion: "v1",
Args: {"mysqld"},
Args: {"proxysql", "-f", "-c", "/etc/proxysql/proxysql.cnf", ...},
- Args: []string{"logrotate"},
AutomountServiceAccountToken: nil,
+ AvailableReplicas: 0,
- AvailableReplicas: 2,
- AvailableReplicas: 3,
AWSElasticBlockStore: nil,
AzureFile: nil,
"b",
- "b3efb95f512a0f3d857d58f10cd2d7ce6e4fddeda82a217e2c46b4c227f3b9ee",
+ "b3efb95f512a0f3d857d58f10cd2d7ce6e4fddeda82a217e2c46b4c227f3b9ee",
Capacity: nil,
- CollisionCount: &0,
+ CollisionCount: nil,
Conditions: nil,
ConfigMapKeyRef: nil,
ConfigMap: &v1.ConfigMapVolumeSource{
ContainerPort: 3306,
ContainerPort: 33060,
ContainerPort: 33062,
ContainerPort: 4444,
ContainerPort: 4567,
ContainerPort: 4568,
ContainerPort: 6032,
ContainerPort: 6070,
Containers: []v1.Container{
+ CreationTimestamp: v1.Time{},
- CreationTimestamp: v1.Time{Time: s"2025-12-02 15:17:36 +0000 UTC"},
+ CurrentReplicas: 0,
- CurrentReplicas: 2,
- CurrentReplicas: 3,
+ CurrentRevision: "",
- CurrentRevision: "some-name-proxysql-568c66bc8b",
- CurrentRevision: "some-name-proxysql-5c54d767f",
- CurrentRevision: "some-name-proxysql-6486db9c7d",
- CurrentRevision: "some-name-proxysql-697744cc75",
- CurrentRevision: "some-name-proxysql-76b4f67745",
- CurrentRevision: "some-name-proxysql-7f594fd985",
- CurrentRevision: "some-name-pxc-57b756f95c",
- CurrentRevision: "some-name-pxc-57dcdb9d76",
- CurrentRevision: "some-name-pxc-6dd6474f4",
- CurrentRevision: "some-name-pxc-f9479cd95",
DataSource: nil,
DataSourceRef: nil,
- DefaultMode: &420,
- DefaultMode: &420,
+ DefaultMode: nil,
+ DefaultMode: nil,
DeletionGracePeriodSeconds: nil,
DeletionGracePeriodSeconds: nil,
DeletionTimestamp: nil,
+ DeprecatedServiceAccount: "",
- DeprecatedServiceAccount: "default",
+ DNSPolicy: "",
- DNSPolicy: "ClusterFirst",
- "e9e51fc7d41db72e6f4e75c7f8e02cdd3c23f91bc42391d57ddac8fc9ef7a8bb",
EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-proxysql"}, Optional: &true}}},
EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-pxc"}, Optional: &true}}},
EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-log-collector"}, Optional: &true}}},
- EnvFrom: []v1.EnvFromSource{{SecretRef: s"&SecretEnvSource{LocalObjectRefe"...}},
Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "OPERATOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "proxyadmin"}}}, ...},
Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, ...},
Env: []v1.EnvVar{
- Env: []v1.EnvVar{
EphemeralContainers: nil,
FailureThreshold: 3,
FC: nil,
FieldPath: "metadata.name",
FieldPath: "metadata.namespace",
FieldRef: &v1.ObjectFieldSelector{
- FieldsType: "FieldsV1",
- FieldsType: "FieldsV1",
- FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`...,
- FieldsV1: s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`...,
Finalizers: nil,
Finalizers: nil,
+ Generation: 0,
- Generation: 1,
- Generation: 2,
- Generation: 3,
- Generation: 4,
- Generation: 5,
- Generation: 6,
- Generation: 7,
- Generation: 8,
github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1
GitRepo: nil,
/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:856
HostAliases: nil,
HostIP: "",
HostPort: 0,
- Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector",
- Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector",
ImagePullPolicy: "Always",
- ImagePullPolicy: "Always",
InitContainers: []v1.Container{
InitialDelaySeconds: 300,
ISCSI: nil,
Items: nil,
Items: nil,
"kubectl.kubernetes.io/default-container": "proxysql",
"kubectl.kubernetes.io/default-container": "pxc",
Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},
Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},
Labels: nil,
+ "last-applied-secret": "94cf10beaee1e7590d3ff3944f274f110969dd1d84489b841e505bcd52db9854",
+ "last-applied-secret": "e9e51fc7d41db72e6f4e75c7f8e02cdd3c23f91bc42391d57ddac8fc9ef7a8bb",
"last-applied-secret": strings.Join({
Lifecycle: nil,
LivenessProbe: &v1.Probe{
LocalObjectReference: {Name: "auto-some-name-pxc"},
LocalObjectReference: {Name: "some-name-pxc"},
ManagedFields: nil,
+ ManagedFields: nil,
- ManagedFields: []v1.ManagedFieldsEntry{
- Manager: "kube-controller-manager",
- Manager: "percona-xtradb-cluster-operator",
MinReadySeconds: 0,
[mysql] 2025/12/02 15:26:28 packets.go:58 read tcp 10.202.112.63:39712->10.202.112.65:33062: read: connection reset by peer
Name: "auto-config",
{Name: "bin", VolumeSource: {EmptyDir: &{}}},
{Name: "CLUSTER_HASH", Value: "4015703"},
Name: "config",
- {Name: "IS_LOGCOLLECTOR", Value: "yes"},
Name: "ist",
{Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"},
- {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"},
- Name: "logrotate",
- Name: "logs",
{Name: "MONITOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "monitor"}}},
- {Name: "MONITOR_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...},
Name: "mysql",
Name: "mysql-admin",
Name: "mysql-init-file",
Name: "mysql-users-secret-file",
Name: "mysqlx",
{Name: "OPERATOR_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}},
Name: "POD_NAME",
Name: "POD_NAMESPASE",
- {Name: "POD_NAMESPASE", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...},
- {Name: "POD_NAME", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...},
Name: "proxyadm",
- {Name: "SERVICE_TYPE", Value: "mysql"},
Namespace: "users-13701",
Name: "ssl",
Name: "ssl-internal",
Name: "sst",
Name: "stats",
{Name: "tmp", VolumeSource: {EmptyDir: &{}}},
Name: "vault-keyring-secret",
Name: "write-set",
{Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}},
NFS: nil,
NodeName: "",
NodeSelector: nil,
ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "last-applied-secret": "e9e51fc7d41db72e6f4e75c7f8e02cdd3c23f91bc42391d57ddac8fc9ef7a8bb", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}},
ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}},
ObjectMeta: {Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "pxc", "last-applied-secret": "810871cf2d60f891b7bab2132d291bf614eaafcc4e48af38e28133d921af67f5", "percona.com/configuration-hash":
"d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}}, ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, ObjectMeta: {Name: "proxydata", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, ObjectMeta: v1.ObjectMeta{ ObjectMeta: v1.ObjectMeta{ + ObservedGeneration: 0, - ObservedGeneration: 1, - ObservedGeneration: 2, - ObservedGeneration: 3, - ObservedGeneration: 4, - ObservedGeneration: 5, - ObservedGeneration: 6, - ObservedGeneration: 7, - ObservedGeneration: 8, - Operation: "Update", - Operation: "Update", Optional: &false, Optional: &true, Optional: &true, Ordinals: nil, OS: nil, Overhead: nil, OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name", UID: "c86893dd-5aa5-4356-91fb-7a458b4a1eb2", ...}}, OwnerReferences: nil, "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMzA5ZjE5ZjUyNDE3M2Y3MmJlYmRiOTFkNWMyNmUzZjg0OGIzNjQ1MTg5NzEzNmIyODZlMDVmY2VkMmYxMzI2ZCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMzA5ZjE5ZjUyNDE3M2Y3MmJlYmRiOTFkNWMyNmUzZjg0OGIzNjQ1MTg5NzEzNmIyODZlMDVmY2VkMmYxMzI2ZCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNmNhMjBkYWI5OTM2Y2ZkYWE1MTU4NjA0YmNiNmU4YWJjMzU3ODgzMWRjMTFjZjE4ZTQyOTU1NjhlNDIzMWMwYyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNmNhMjBkYWI5OTM2Y2ZkYWE1MTU4NjA0YmNiNmU4YWJjMzU3ODgzMWRjMTFjZjE4ZTQyOTU1NjhlNDIzMWMwYyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiODEwODcxY2YyZDYwZjg5MWI3YmFiMjEzMmQyOTFiZjYxNGVhYWZjYzRlNDhhZjM4ZTI4MTMzZDkyMWFmNjdmNSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOGQwZWRiNTg2ZjUyODFkNDZhYTM5OTlmOWJjZjExZjJjNDY2MmFiZGU3YmExNzlmOTVkOWQyMmM2OWNmNWZiZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOGQwZWRiNTg2ZjUyODFkNDZhYTM5OTlmOWJjZjExZjJjNDY2MmFiZGU3YmExNzlmOTVkOWQyMmM2OWNmNWZiZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYjNlZmI5NWY1MTJhMGYzZDg1N2Q1OGYxMGNkMmQ3Y2U2ZTRmZGRlZGE4MmEyMTdlMmM0NmI0YzIyN2YzYjllZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYjNlZmI5NWY1MTJhMGYzZDg1N2Q1OGYxMGNkMmQ3Y2U2ZTRmZGRlZGE4MmEyMTdlMmM0NmI0YzIyN2YzYjllZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZTllNTFmYzdkNDFkYjcyZTZmNGU3NWM3ZjhlMDJjZGQzYzIzZjkxYmM0MjM5MWQ1N2RkYWM4ZmM5ZWY3YThiYiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNmNhMjBkYWI5OTM2Y2ZkYWE1MTU4NjA0YmNiNmU4YWJjMzU3ODgzMWRjMTFjZjE4ZTQyOTU1NjhlNDIzMWMwYyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiODEwODcxY2YyZDYwZjg5MWI3YmFiMjEzMmQyOTFiZjYxNGVhYWZjYzRlNDhhZjM4ZTI4MTMzZDkyMWFmNjdmNSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiODEwODcxY2YyZDYwZjg5MWI3YmFiMjEzMmQyOTFiZjYxNGVhYWZjYzRlNDhhZjM4ZTI4MTMzZDkyMWFmNjdmNSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiODEwODcxY2YyZDYwZjg5MWI3YmFiMjEzMmQyOTFiZjYxNGVhYWZjYzRlNDhhZjM4ZTI4MTMzZDkyMWFmNjdmNSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMjM0LTA3ZmNjNDE5IiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJsb2dzIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1sb2djb2xsZWN0b3IiLCJlbnZGcm9tIjpbeyJzZWNyZXRSZWYiOnsibmFtZSI6InNvbWUtbmFtZS1sb2ctY29sbGVjdG9yIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IkxPR19EQVRBX0RJUiIsInZhbHVlIjoiL3Zhci9saWIvbXlzcWwifSx7Im5hbWUiOiJQT0RfTkFNRVNQQVNFIiwidmFsdWVGcm9tIjp7ImZpZWxkUmVmIjp7ImZpZWxkUGF0aCI6Im1ldGFkYXRhLm5hbWVzcGFjZSJ9fX0seyJuYW1lIjoiUE9EX05BTUUiLCJ2YWx1ZUZyb20iOnsiZmllbGRSZWYiOnsiZmllbGRQYXRoIjoibWV0YWRhdGEubmFtZSJ9fX1dLCJyZXNvdXJjZXMiOnt9LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJkYXRhZGlyIiwibW91bnRQYXRoIjoiL3Zhci9saWIvbXlzcWwifV0sImltYWdlUHVsbFBvbGljeSI6IkFsd2F5cyJ9LHsibmFtZSI6ImxvZ3JvdGF0ZSIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOm1haW4tbG9nY29sbGVjdG9yIiwiYXJncyI6WyJsb2dyb3RhdGUiXSwiZW52IjpbeyJuYW1lIjoiU0VSVklDRV9UWVBFIiwidmFsdWUiOiJteXNxbCJ9LHsibmFtZSI6Ik1PTklUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJtb25pdG9yIn19fV0sInJlc291cmNlcyI6e30sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImRhdGFkaXIiLCJtb3VudF
BhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHhjIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1weGM4LjAiLCJjb21tYW5kIjpbIi92YXIvbGliL215c3FsL3B4Yy1lbnRyeXBvaW50LnNoIl0sImFyZ3MiOlsibXlzcWxkIl0sInBvcnRzIjpbeyJuYW1lIjoibXlzcWwiLCJjb250YWluZXJQb3J0IjozMzA2fSx7Im5hbWUiOiJzc3QiLCJjb250YWluZXJQb3J0Ijo0NDQ0fSx7Im5hbWUiOiJ3cml0ZS1zZXQiLCJjb250YWluZXJQb3J0Ijo0NTY3fSx7Im5hbWUiOiJpc3QiLCJjb250YWluZXJQb3J0Ijo0NTY4fSx7Im5hbWUiOiJteXNxbC1hZG1pbiIsImNvbnRhaW5lclBvcnQiOjMzMDYyfSx7Im5h"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiODEwODcxY2YyZDYwZjg5MWI3YmFiMjEzMmQyOTFiZjYxNGVhYWZjYzRlNDhhZjM4ZTI4MTMzZDkyMWFmNjdmNSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMjM0LTA3ZmNjNDE5IiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXB4YzguMCIsImNvbW1hbmQiOlsiL3Zhci9saWIvbXlzcWwvcHhjLWVudHJ5cG9pbnQuc2giXSwiYXJncyI6WyJteXNxbGQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InNzdCIsImNvbnRhaW5lclBvcnQiOjQ0NDR9LHsibmFtZSI6IndyaXRlLXNldCIsImNvbnRhaW5lclBvcnQiOjQ1Njd9LHsibmFtZSI6ImlzdCIsImNvbnRhaW5lclBvcnQiOjQ1Njh9LHsibmFtZSI6Im15c3FsLWFkbWluIiwiY29udGFpbmVyUG9ydCI6MzMwNjJ9LHsibmFtZSI6Im15c3FseCIsImNv
bnRhaW5lclBvcnQiOjMzMDYwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXB4YyIsIm9wdGlvbmFsIjp0cnVlfX1dLCJlbnYiOlt7Im5hbWUiOiJQWENfU0VSVklDRSIsInZhbHVlIjoic29tZS1uYW1lLXB4Yy11bnJlYWR5In0seyJuYW1lIjoiTU9OSVRPUl9IT1NUIiwidmFsdWUiOiIlIn0seyJuYW1lIjoiTVlTUUxfUk9PVF9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6InJvb3QifX19LHsibmFtZSI6IlhUUkFCQUNLVVBfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJ4dHJhYmFja3VwIn19fSx7Im5hbWUiOiJNT05JVE9SX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoibW9uaXRvciJ9fX0seyJuYW1lIjoiQ0xVU1RFUl9IQVNIIiwidmFsdWUiOiI0MDE1NzAzIn0seyJuYW1lIjoiT1BFUkFUT1JfQURNSU5fUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiTElWRU5FU1NfQ0hFQ0tfVElNRU9VVCIsInZhbHVlIjoiNSJ9LHsibmFtZSI6IlJFQURJTkVTU19DSEVDS19USU1FT1VUIiwidmFsdWUiOiIxNSJ9LHsibmFtZSI6IkRFRkFVTFRfQVVUSEVOVElDQVRJT05fUExVR0lOIiwidmFsdWUiOiJjYWNoaW5nX3NoYTJfcGFzc3dvcmQifSx7Im5hbWUiOiJNWVNR"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOTRjZjEwYmVhZWUxZTc1OTBkM2ZmMzk0NGYyNzRmMTEwOTY5ZGQxZDg0NDg5Yjg0MWU1MDViY2Q1MmRiOTg1NCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOTRjZjEwYmVhZWUxZTc1OTBkM2ZmMzk0NGYyNzRmMTEwOTY5ZGQxZDg0NDg5Yjg0MWU1MDViY2Q1MmRiOTg1NCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYjNlZmI5NWY1MTJhMGYzZDg1N2Q1OGYxMGNkMmQ3Y2U2ZTRmZGRlZGE4MmEyMTdlMmM0NmI0YzIyN2YzYjllZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYjNlZmI5NWY1MTJhMGYzZDg1N2Q1OGYxMGNkMmQ3Y2U2ZTRmZGRlZGE4MmEyMTdlMmM0NmI0YzIyN2YzYjllZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNv"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZTllNTFmYzdkNDFkYjcyZTZmNGU3NWM3ZjhlMDJjZGQzYzIzZjkxYmM0MjM5MWQ1N2RkYWM4ZmM5ZWY3YThiYiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMw"..., "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc", + PeriodSeconds: 0, - PeriodSeconds: 10, + PersistentVolumeClaimRetentionPolicy: nil, - PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", + Phase: "", - Phase: "Pending", + PodManagementPolicy: "", - PodManagementPolicy: "OrderedReady", Ports: nil, Ports: []v1.ContainerPort{ PreemptionPolicy: nil, ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}}, + Protocol: "", - Protocol: "TCP", Quobyte: nil, ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...}, + ReadyReplicas: 0, - ReadyReplicas: 2, - ReadyReplicas: 3, + Replicas: 0, Replicas: &2, - Replicas: 2, - Replicas: &2, + Replicas: &2, Replicas: &3, - Replicas: 3, - Replicas: &3, + Replicas: &3, ResizePolicy: nil, ResourceFieldRef: nil, 
Resources: {}, Resources: {Limits: {s"cpu": {i: {...}, s: "1", Format: "DecimalSI"}, s"memory": {i: {...}, s: "2G", Format: "DecimalSI"}}, Requests: {s"cpu": {i: {...}, s: "100m", Format: "DecimalSI"}, s"memory": {i: {...}, Format: "DecimalSI"}}}, + ResourceVersion: "", - ResourceVersion: "1764688690603967002", - ResourceVersion: "1764688875797247019", - ResourceVersion: "1764689021269471002", - ResourceVersion: "1764689067342367002", - ResourceVersion: "1764689080395759002", - ResourceVersion: "1764689227150367019", - ResourceVersion: "1764689271839151002", - ResourceVersion: "1764689342826415002", - ResourceVersion: "1764689393907279002", - ResourceVersion: "1764689526618975019", - ResourceVersion: "1764689566216799002", - ResourceVersion: "1764689671408191019", - ResourceVersion: "1764689859588863019", + RestartPolicy: "", - RestartPolicy: "Always", - RevisionHistoryLimit: &10, + RevisionHistoryLimit: nil, + SchedulerName: "", - SchedulerName: "default-scheduler", SecretName: "internal-some-name", SecretName: "some-name-mysql-init", SecretName: "some-name-ssl", SecretName: "some-name-ssl-internal", SecretName: "some-name-vault", Secret: &v1.SecretVolumeSource{ SecurityContext: nil, Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, SelfLink: "", ServiceAccountName: "default", ServiceName: "some-name-proxysql-unready", ServiceName: "some-name-pxc", SetHostnameAsFQDN: nil, Spec: v1.PersistentVolumeClaimSpec{ Spec: v1.PodSpec{ Spec: v1.StatefulSetSpec{ StartupProbe: nil, Status: v1.PersistentVolumeClaimStatus{ Status: v1.StatefulSetStatus{ StorageClassName: nil, Subdomain: "", - Subresource: "status", SuccessThreshold: 1, Template: v1.PodTemplateSpec{ TerminationGracePeriodSeconds: &30, TerminationGracePeriodSeconds: &600, TerminationGracePeriodSeconds: nil, + TerminationMessagePath: "", - TerminationMessagePath: "/dev/termination-log", + TerminationMessagePolicy: "", - TerminationMessagePolicy: "File", TimeoutSeconds: 5, - Time: s"2025-12-02 15:17:36 +0000 UTC", - Time: s"2025-12-02 15:18:10 +0000 UTC", - Time: s"2025-12-02 15:21:15 +0000 UTC", - Time: s"2025-12-02 15:23:22 +0000 UTC", - Time: s"2025-12-02 15:23:41 +0000 UTC", - Time: s"2025-12-02 15:23:49 +0000 UTC", - Time: s"2025-12-02 15:24:27 +0000 UTC", - Time: s"2025-12-02 15:24:39 +0000 UTC", - Time: s"2025-12-02 15:24:40 +0000 UTC", - Time: s"2025-12-02 15:24:41 +0000 UTC", - Time: s"2025-12-02 15:27:07 +0000 UTC", - Time: s"2025-12-02 15:27:18 +0000 UTC", - Time: s"2025-12-02 15:27:51 +0000 UTC", - Time: s"2025-12-02 15:28:37 +0000 UTC", - Time: s"2025-12-02 15:29:02 +0000 UTC", - Time: s"2025-12-02 15:29:37 +0000 UTC", - Time: s"2025-12-02 15:29:53 +0000 UTC", - Time: s"2025-12-02 15:32:06 +0000 UTC", - Time: s"2025-12-02 15:32:13 +0000 UTC", - Time: s"2025-12-02 15:32:46 +0000 UTC", - Time: s"2025-12-02 15:34:15 +0000 UTC", - Time: s"2025-12-02 15:34:31 +0000 UTC", - Time: s"2025-12-02 15:34:31 +0000 UTC", - Time: s"2025-12-02 15:37:39 +0000 UTC", Tolerations: nil, - TopologySpreadConstraints: nil, + TopologySpreadConstraints: []v1.TopologySpreadConstraint{}, TypeMeta: {}, TypeMeta: {Kind: "StatefulSet", 
APIVersion: "apps/v1"}, + UID: "", - UID: "6d867baf-0905-45fe-97a6-8b4749c79242", - UID: "8f325474-b9d1-4dc0-9d68-9d5ad73c7597", + UpdatedReplicas: 0, - UpdatedReplicas: 1, - UpdatedReplicas: 2, - UpdatedReplicas: 3, + UpdateRevision: "", - UpdateRevision: "some-name-proxysql-568c66bc8b", - UpdateRevision: "some-name-proxysql-5c54d767f", - UpdateRevision: "some-name-proxysql-6486db9c7d", - UpdateRevision: "some-name-proxysql-697744cc75", - UpdateRevision: "some-name-proxysql-76b4f67745", - UpdateRevision: "some-name-proxysql-7f594fd985", - UpdateRevision: "some-name-pxc-57b756f95c", - UpdateRevision: "some-name-pxc-57dcdb9d76", - UpdateRevision: "some-name-pxc-6cf7c787fb", - UpdateRevision: "some-name-pxc-6dd6474f4", - UpdateRevision: "some-name-pxc-f9479cd95", UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}}, &v1.StatefulSet{ Value: "", ValueFrom: &v1.EnvVarSource{ VolumeAttributesClassName: nil, VolumeClaimTemplates: []v1.PersistentVolumeClaim{ VolumeDevices: nil, - VolumeMode: &"Filesystem", + VolumeMode: nil, VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...}, - VolumeMounts: []v1.VolumeMount{{Name: "datadir", MountPath: "/var/lib/mysql"}}, VolumeName: "", VolumeSource: v1.VolumeSource{ Volumes: []v1.Volume{ VsphereVolume: nil, WorkingDir: "", + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-13701 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.UFbfcVdMWQ ++ mktemp + local LAST_ERR=/tmp/tmp.KV7qMNU5J5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.UFbfcVdMWQ perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-13701 namespace + cat /tmp/tmp.KV7qMNU5J5 + rm /tmp/tmp.UFbfcVdMWQ /tmp/tmp.KV7qMNU5J5 + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.qwxyULkuDS ++ mktemp + local LAST_ERR=/tmp/tmp.AqorGaPChZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qwxyULkuDS No resources found + cat /tmp/tmp.AqorGaPChZ + rm /tmp/tmp.qwxyULkuDS /tmp/tmp.AqorGaPChZ + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.0y7uobuKHr ++ mktemp + local LAST_ERR=/tmp/tmp.48VldPhRoQ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0y7uobuKHr No resources found + cat /tmp/tmp.48VldPhRoQ + rm /tmp/tmp.0y7uobuKHr /tmp/tmp.48VldPhRoQ + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.RpreEFdP5R ++ mktemp + local LAST_ERR=/tmp/tmp.A1cw3x7N9a + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RpreEFdP5R 
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.A1cw3x7N9a + rm /tmp/tmp.RpreEFdP5R /tmp/tmp.A1cw3x7N9a + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-13701 + rm -rf /tmp/tmp.AIh4ASubZF + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.y4pPNG7Y3H ++ mktemp + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.54haNz4uM9 + local LAST_ERR=/tmp/tmp.kbJyTIfSrK + local exit_status=0 ++ seq 0 2 ++ mktemp + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-13701 + local LAST_ERR=/tmp/tmp.1GJIxIWspP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator
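The cleanup trace above clears PXC finalizers before deleting the custom resources, so that kubectl delete cannot hang waiting on a finalizer that the operator — itself about to be removed — would never process. The xargs -L 1 sh -xc pipeline expands to one kubectl patch per cluster; inside sh -c the namespace column becomes $0 and the name column $1, with any trailing columns ignored. The same sequence, extracted from the trace as a standalone snippet:

# For every PerconaXtraDBCluster, drop all finalizers, then delete.
kubectl get pxc --all-namespaces -o wide \
  | grep -v NAMESPACE \
  | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
kubectl delete pxc --all --all-namespaces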
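Every kubectl_bin call in this log expands to the same trace: capture stdout and stderr to mktemp files, attempt the command up to three times (seq 0 2) with errexit suspended around each attempt, print both streams, remove the temp files, and propagate the exit status. A minimal reconstruction of that wrapper — the function body is inferred; only its observable behavior comes from the trace:

# Sketch of the kubectl_bin retry wrapper seen throughout this log.
kubectl_bin() {
	local LAST_OUT LAST_ERR exit_status=0 i
	LAST_OUT=$(mktemp)
	LAST_ERR=$(mktemp)
	for i in $(seq 0 2); do
		set +e
		kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
		exit_status=$?
		set -e
		if [ "$exit_status" -eq 0 ]; then
			break
		fi
		sleep 1   # back-off between attempts is an assumption, not visible in the trace
	done
	cat "$LAST_OUT"
	cat "$LAST_ERR"
	rm "$LAST_OUT" "$LAST_ERR"
	return "$exit_status"
}

# Usage matches the calls above, e.g.:
# kubectl_bin delete pxc --all --all-namespaces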