Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/logs/users-5-7.log Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 + create_infra users-17540 + local ns=users-17540 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-10867 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.OxrXQksChP ++ mktemp + local LAST_ERR=/tmp/tmp.EuPsuvouIt + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.OxrXQksChP perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-10867 namespace + cat /tmp/tmp.EuPsuvouIt + rm /tmp/tmp.OxrXQksChP /tmp/tmp.EuPsuvouIt + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.CRKFtdSfya ++ mktemp + local LAST_ERR=/tmp/tmp.ENdcLKwE06 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.CRKFtdSfya No resources found + cat /tmp/tmp.ENdcLKwE06 + rm /tmp/tmp.CRKFtdSfya /tmp/tmp.ENdcLKwE06 + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.i0c1ACqOHC ++ mktemp + local LAST_ERR=/tmp/tmp.EGsReX4I5C + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.i0c1ACqOHC No resources found + cat /tmp/tmp.EGsReX4I5C + rm /tmp/tmp.i0c1ACqOHC /tmp/tmp.EGsReX4I5C + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// ++ tail -n1 + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep validate-auth + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl get clusterrole + timeout 30 kubectl delete clusterrole error: resource(s) were 
provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp + xargs kubectl delete ns + local LAST_OUT=/tmp/tmp.qavkFI7jfk + egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' ++ mktemp + local LAST_ERR=/tmp/tmp.IG3Qc81hGK + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.FkAWe9mkhB ++ mktemp + local LAST_ERR=/tmp/tmp.Xt9RHMuIFK + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.FkAWe9mkhB + cat /tmp/tmp.Xt9RHMuIFK + rm /tmp/tmp.FkAWe9mkhB /tmp/tmp.Xt9RHMuIFK + return 0 namespace "users-10867" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qavkFI7jfk namespace "pxc-operator" deleted + cat /tmp/tmp.IG3Qc81hGK + rm /tmp/tmp.qavkFI7jfk /tmp/tmp.IG3Qc81hGK + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.eq2hwoPFE1 ++ mktemp + local LAST_ERR=/tmp/tmp.QZatEIF3yS + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.eq2hwoPFE1 namespace/pxc-operator created + cat /tmp/tmp.QZatEIF3yS + rm /tmp/tmp.eq2hwoPFE1 /tmp/tmp.QZatEIF3yS + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.2OmQCJOsom +++ mktemp ++ local LAST_ERR=/tmp/tmp.OY9KbnTKFE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2OmQCJOsom ++ cat /tmp/tmp.OY9KbnTKFE ++ rm /tmp/tmp.2OmQCJOsom /tmp/tmp.OY9KbnTKFE ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2202-c5e2e681-2-cluster1 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.bfTlqVCYsM ++ mktemp + local LAST_ERR=/tmp/tmp.sZIv5fAFdr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2202-c5e2e681-2-cluster1 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.bfTlqVCYsM Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2202-c5e2e681-2-cluster1" modified. 
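
Annotation: nearly every command in this trace runs through the suite's kubectl_bin wrapper, whose expanded body is what produces the recurring mktemp / LAST_OUT / LAST_ERR / "seq 0 2" noise above. A minimal reconstruction of that retry pattern, inferred from the trace (the exact condition guarding the retry branch is not visible here, so the if/else shape below is an assumption; the trace does show "sleep 0" between failed attempts):

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do                      # up to three attempts
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 ]; then
                sleep 0                              # trace shows "sleep 0" before retrying
            else
                break
            fi
        done
        cat "$LAST_OUT"                              # replay captured stdout/stderr
        cat "$LAST_ERR"
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }

This is why a transient failure appears in the log as three repeated kubectl invocations followed by "return 1", while a success shows one invocation and an immediate "break".
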
+ cat /tmp/tmp.sZIv5fAFdr + rm /tmp/tmp.bfTlqVCYsM /tmp/tmp.sZIv5fAFdr + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.oTnOqTUqyb ++ mktemp + local LAST_ERR=/tmp/tmp.GrDZbTly4L + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.oTnOqTUqyb customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.GrDZbTly4L + rm /tmp/tmp.oTnOqTUqyb /tmp/tmp.GrDZbTly4L + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.ePE3rwyxra ++ mktemp + local LAST_ERR=/tmp/tmp.r4D6eeE5h4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ePE3rwyxra clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.r4D6eeE5h4 + rm /tmp/tmp.ePE3rwyxra /tmp/tmp.r4D6eeE5h4 + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/deploy/cw-operator.yaml + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2202-c5e2e681^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + kubectl_bin apply -f - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - ++ mktemp + local LAST_OUT=/tmp/tmp.jwOBeVg4IZ ++ mktemp + local LAST_ERR=/tmp/tmp.MLAdAV5fqW + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jwOBeVg4IZ deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.MLAdAV5fqW + rm /tmp/tmp.jwOBeVg4IZ /tmp/tmp.MLAdAV5fqW + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.EA6NyRCzxn ++ mktemp + local LAST_ERR=/tmp/tmp.0W1wOoEEvY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.EA6NyRCzxn pod/percona-xtradb-cluster-operator-d84c66648-5h9dt condition met + cat /tmp/tmp.0W1wOoEEvY + rm /tmp/tmp.EA6NyRCzxn /tmp/tmp.0W1wOoEEvY + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZKbJ0uYoUY +++ mktemp ++ local LAST_ERR=/tmp/tmp.RIRYKA7Jc1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZKbJ0uYoUY ++ cat /tmp/tmp.RIRYKA7Jc1 ++ rm /tmp/tmp.ZKbJ0uYoUY /tmp/tmp.RIRYKA7Jc1 ++ return 0 + wait_pod percona-xtradb-cluster-operator-d84c66648-5h9dt 480 pxc-operator + local pod=percona-xtradb-cluster-operator-d84c66648-5h9dt + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-d84c66648-5h9dt ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-d84c66648-5h9dt condition met waiting for pod/percona-xtradb-cluster-operator-d84c66648-5h9dt to become Ready.Ok + sleep 3 + create_namespace users-17540 + local namespace=users-17540 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ awk '{print $1}' ++ grep chaos-mesh.org + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + '[' -n '' ']' + desc 'cleaned up old 
namespaces users-17540' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-17540 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-17540 ++ mktemp + xargs kubectl delete ns + local LAST_OUT=/tmp/tmp.Tayzn9O5w5 + egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + kubectl_bin get ns ++ mktemp + awk '{print$1}' ++ mktemp + local LAST_OUT=/tmp/tmp.2W52wXKk7t + local LAST_ERR=/tmp/tmp.P6urNVnRdu + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-17540 ++ mktemp + local LAST_ERR=/tmp/tmp.KBKaq0N3ll + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2W52wXKk7t + cat /tmp/tmp.KBKaq0N3ll + rm /tmp/tmp.2W52wXKk7t /tmp/tmp.KBKaq0N3ll + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-17540 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-17540 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.Tayzn9O5w5 + cat /tmp/tmp.P6urNVnRdu Error from server (NotFound): namespaces "users-17540" not found + rm /tmp/tmp.Tayzn9O5w5 /tmp/tmp.P6urNVnRdu + return 1 + : + wait_for_delete namespace/users-17540 + local res=namespace/users-17540 + echo -n 'waiting for namespace/users-17540 to be deleted' waiting for namespace/users-17540 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "users-17540" not found + desc 'create namespace users-17540' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-17540 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-17540 ++ mktemp + local LAST_OUT=/tmp/tmp.n6HV90FDyG ++ mktemp + local LAST_ERR=/tmp/tmp.CdIkyMk8Ac + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-17540 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.n6HV90FDyG namespace/users-17540 created + cat /tmp/tmp.CdIkyMk8Ac + rm /tmp/tmp.n6HV90FDyG /tmp/tmp.CdIkyMk8Ac + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.nrk39uIbaA +++ mktemp ++ local LAST_ERR=/tmp/tmp.sGPK1Fm3h6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nrk39uIbaA ++ cat /tmp/tmp.sGPK1Fm3h6 ++ rm /tmp/tmp.nrk39uIbaA /tmp/tmp.sGPK1Fm3h6 ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2202-c5e2e681-2-cluster1 --namespace=users-17540 ++ mktemp + local LAST_OUT=/tmp/tmp.UIhVNhi1o8 ++ mktemp + local LAST_ERR=/tmp/tmp.g2U0dZnK8i + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2202-c5e2e681-2-cluster1 --namespace=users-17540 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.UIhVNhi1o8 Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2202-c5e2e681-2-cluster1" modified. 
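
Annotation: the create_infra and create_namespace steps above follow one cleanup recipe: strip finalizers from leftover PerconaXtraDBCluster objects so deletion cannot hang, delete the custom resources, then drop and recreate the working namespace. Condensed from the trace (the polling body of wait_for_delete is not shown in this excerpt, so the while loop below is an assumption):

    # Strip finalizers from every pxc object, then delete the CRs everywhere.
    kubectl get pxc --all-namespaces -o wide | grep -v NAMESPACE \
        | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
    kubectl delete pxc --all --all-namespaces

    # Drop every non-system namespace, then recreate the one under test.
    kubectl get ns \
        | egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' \
        | awk '{print $1}' | xargs kubectl delete ns
    kubectl delete namespace users-17540 || :        # NotFound is tolerated, as in the trace
    while kubectl get namespace users-17540 >/dev/null 2>&1; do
        sleep 1                                      # assumed wait_for_delete polling body
    done
    kubectl create namespace users-17540

Note the retry loop visible above: the namespace delete fails with NotFound on a fresh cluster, kubectl_bin returns 1 after three attempts, and the caller discards the failure with "+ :".
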
+ cat /tmp/tmp.g2U0dZnK8i + rm /tmp/tmp.UIhVNhi1o8 /tmp/tmp.g2U0dZnK8i + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.ji7BXKwsh9 ++ mktemp + local LAST_ERR=/tmp/tmp.YqKyfU05ul + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ji7BXKwsh9 secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.YqKyfU05ul + rm /tmp/tmp.ji7BXKwsh9 /tmp/tmp.YqKyfU05ul + return 0 + desc 'create PXC cluster with 1-password secret' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster with 1-password secret ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/some-name.yml '' '' /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/secrets_one_pass.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/secrets_one_pass.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/secrets_one_pass.yml ++ mktemp + local LAST_OUT=/tmp/tmp.DF2xyLZcjl ++ mktemp + local LAST_ERR=/tmp/tmp.nio8M5Qv3K + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/secrets_one_pass.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.DF2xyLZcjl secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.nio8M5Qv3K + rm /tmp/tmp.DF2xyLZcjl /tmp/tmp.nio8M5Qv3K + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/client.yml + kubectl_bin apply -f - + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-17540~ + /usr/bin/sed -e 
's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2202-c5e2e681#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' ++ mktemp + local LAST_OUT=/tmp/tmp.rYPGgce1Uc ++ mktemp + local LAST_ERR=/tmp/tmp.cJ8GBCuIIF + local exit_status=0 ++ seq 0 2 + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rYPGgce1Uc deployment.apps/pxc-client created + cat /tmp/tmp.cJ8GBCuIIF + rm /tmp/tmp.rYPGgce1Uc /tmp/tmp.cJ8GBCuIIF + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2202-c5e2e681#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-17540~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' ++ mktemp + local LAST_OUT=/tmp/tmp.BTJxXdA6dP ++ mktemp + local LAST_ERR=/tmp/tmp.K5TRyOxVdf + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BTJxXdA6dP perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.K5TRyOxVdf + rm /tmp/tmp.BTJxXdA6dP /tmp/tmp.K5TRyOxVdf + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' 
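
Annotation: both client.yml and some-name.yml pass through the cat_config helper, which is a sed pipeline rewriting every image: reference in the manifest to the images under test before piping the result to kubectl apply -f -. A condensed sketch using a subset of the substitutions visible above (the real helper chains several more -e expressions for haproxy, pmm, backup, and logcollector images):

    cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/conf/some-name.yml \
        | sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
        | sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' \
        | sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2202-c5e2e681#' \
        | sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' \
        | sed -e 's~minio-service.#namespace~minio-service.users-17540~' \
        | kubectl apply -f -

Earlier, deploy_operator used the same pipe-to-apply trick with yq eval instead of sed, forcing LOG_LEVEL=VERBOSE and DISABLE_TELEMETRY=true into the operator Deployment's env before applying it.
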
++++ mktemp +++ local LAST_OUT=/tmp/tmp.h1CyB37juj ++++ mktemp +++ local LAST_ERR=/tmp/tmp.Lz4Xs9fWwr +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.h1CyB37juj +++ cat /tmp/tmp.Lz4Xs9fWwr +++ rm /tmp/tmp.h1CyB37juj /tmp/tmp.Lz4Xs9fWwr +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.OI3RMG6Yul ++++ mktemp +++ local LAST_ERR=/tmp/tmp.jlCdFQ70Xw +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.OI3RMG6Yul +++ cat /tmp/tmp.jlCdFQ70Xw +++ rm /tmp/tmp.OI3RMG6Yul /tmp/tmp.jlCdFQ70Xw +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-17540 ++ mktemp + local LAST_OUT=/tmp/tmp.IZjnS0hIqI ++ mktemp + local LAST_ERR=/tmp/tmp.APNFCefRjO + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-17540 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-17540 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-17540 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.IZjnS0hIqI + cat /tmp/tmp.APNFCefRjO error: no matching resources found + rm /tmp/tmp.IZjnS0hIqI /tmp/tmp.APNFCefRjO + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ 
echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.odxj1EI1av +++ mktemp ++ local LAST_ERR=/tmp/tmp.6rvzI6j7EO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.odxj1EI1av ++ cat /tmp/tmp.6rvzI6j7EO ++ rm /tmp/tmp.odxj1EI1av /tmp/tmp.6rvzI6j7EO ++ return 0 + local 'root_pass=>eE19u+%EVOm<#$]VQg' + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xcCRQPgNF3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.R9KMu65WTX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xcCRQPgNF3 ++ cat /tmp/tmp.R9KMu65WTX Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.xcCRQPgNF3 /tmp/tmp.R9KMu65WTX ++ return 0 + [[ -n '' ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''>eE19u+%EVOm<#$]VQg'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -p'\''>eE19u+%EVOm<#$]VQg'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0oxLnXlXoS +++ mktemp ++ local LAST_ERR=/tmp/tmp.h8yd3uewiD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0oxLnXlXoS ++ cat /tmp/tmp.h8yd3uewiD ++ rm /tmp/tmp.0oxLnXlXoS 
/tmp/tmp.h8yd3uewiD ++ return 0 + client_pod=pxc-client-857d976497-pvdtf + wait_pod pxc-client-857d976497-pvdtf + local pod=pxc-client-857d976497-pvdtf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-pvdtf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-pvdtf condition met waiting for pod/pxc-client-857d976497-pvdtf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''>eE19u+%EVOm<#$]VQg'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -p'\''>eE19u+%EVOm<#$]VQg'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JdpplZ802v +++ mktemp ++ local LAST_ERR=/tmp/tmp.5gA6F5iVPp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JdpplZ802v ++ cat /tmp/tmp.5gA6F5iVPp ++ rm /tmp/tmp.JdpplZ802v /tmp/tmp.5gA6F5iVPp ++ return 0 + client_pod=pxc-client-857d976497-pvdtf + wait_pod pxc-client-857d976497-pvdtf + local pod=pxc-client-857d976497-pvdtf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-pvdtf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-pvdtf condition met waiting for pod/pxc-client-857d976497-pvdtf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''>eE19u+%EVOm<#$]VQg'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''>eE19u+%EVOm<#$]VQg'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''>eE19u+%EVOm<#$]VQg'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''>eE19u+%EVOm<#$]VQg'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uidEki6QLX +++ mktemp ++ local LAST_ERR=/tmp/tmp.86sluFyZKP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uidEki6QLX ++ cat /tmp/tmp.86sluFyZKP ++ rm /tmp/tmp.uidEki6QLX /tmp/tmp.86sluFyZKP ++ return 0 + client_pod=pxc-client-857d976497-pvdtf + wait_pod pxc-client-857d976497-pvdtf + local pod=pxc-client-857d976497-pvdtf + local max_retry=480 + local ns= ++ echo 
pxc-client-857d976497-pvdtf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-pvdtf condition met waiting for pod/pxc-client-857d976497-pvdtf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.o30MpYaJxV/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-1.sql /tmp/tmp.o30MpYaJxV/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''>eE19u+%EVOm<#$]VQg'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''>eE19u+%EVOm<#$]VQg'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''>eE19u+%EVOm<#$]VQg'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''>eE19u+%EVOm<#$]VQg'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VXrPhAcqkV +++ mktemp ++ local LAST_ERR=/tmp/tmp.oeGU0FqzdF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VXrPhAcqkV ++ cat /tmp/tmp.oeGU0FqzdF ++ rm /tmp/tmp.VXrPhAcqkV /tmp/tmp.oeGU0FqzdF ++ return 0 + client_pod=pxc-client-857d976497-pvdtf + wait_pod pxc-client-857d976497-pvdtf + local pod=pxc-client-857d976497-pvdtf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-pvdtf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-pvdtf condition met waiting for pod/pxc-client-857d976497-pvdtf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.o30MpYaJxV/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-1.sql /tmp/tmp.o30MpYaJxV/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''>eE19u+%EVOm<#$]VQg'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''>eE19u+%EVOm<#$]VQg'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''>eE19u+%EVOm<#$]VQg'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''>eE19u+%EVOm<#$]VQg'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XT7sSZ32co +++ mktemp ++ local LAST_ERR=/tmp/tmp.lksFA4xmVD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XT7sSZ32co ++ cat /tmp/tmp.lksFA4xmVD ++ rm /tmp/tmp.XT7sSZ32co /tmp/tmp.lksFA4xmVD ++ return 0 + client_pod=pxc-client-857d976497-pvdtf + wait_pod pxc-client-857d976497-pvdtf + local pod=pxc-client-857d976497-pvdtf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-pvdtf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-pvdtf condition met waiting for pod/pxc-client-857d976497-pvdtf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.o30MpYaJxV/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-1.sql /tmp/tmp.o30MpYaJxV/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZWQ32rQswK +++ mktemp ++ local LAST_ERR=/tmp/tmp.sDmTBi80cC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZWQ32rQswK ++ cat /tmp/tmp.sDmTBi80cC Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.ZWQ32rQswK /tmp/tmp.sDmTBi80cC ++ return 0 + '[' '' ']' + desc 'test missing passwords were created and present in internal secrets' + set +o xtrace ----------------------------------------------------------------------------------- test missing passwords were created and present in internal secrets ----------------------------------------------------------------------------------- + empty_pwds=() + wrong_pwds=() + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking root' Checking root ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.EaOMlpjsB5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.sptaIRMcPj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EaOMlpjsB5 ++ cat /tmp/tmp.sptaIRMcPj ++ rm /tmp/tmp.EaOMlpjsB5 /tmp/tmp.sptaIRMcPj ++ return 0 + secret_pass='>eE19u+%EVOm<#$]VQg' ++ getSecretData internal-some-name root ++ local secretName=internal-some-name ++ local dataKey=root ++ base64 --decode ++ kubectl_bin get secrets/internal-some-name '--template={{.data.root}}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.svnwdvZblR +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZtWlunmxSi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.svnwdvZblR ++ cat /tmp/tmp.ZtWlunmxSi ++ rm /tmp/tmp.svnwdvZblR /tmp/tmp.ZtWlunmxSi ++ return 0 + int_secret_pass='>eE19u+%EVOm<#$]VQg' + [[ -z >eE19u+%EVOm<#$]VQg ]] + [[ >eE19u+%EVOm<#$]VQg != \>\e\E\1\9\u\+\%\E\V\O\m\<\#\$\]\V\Q\g ]] + [[ root != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ root ]] + [[ '' =~ root ]] + echo 'Running compare for root' Running compare for root + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''>eE19u+%EVOm<#$]VQg'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''>eE19u+%EVOm<#$]VQg'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' 
'-h some-name-proxysql -uroot -p'\''>eE19u+%EVOm<#$]VQg'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''>eE19u+%EVOm<#$]VQg'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GyHiFxQ5sW +++ mktemp ++ local LAST_ERR=/tmp/tmp.NWeUixx2nT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GyHiFxQ5sW ++ cat /tmp/tmp.NWeUixx2nT ++ rm /tmp/tmp.GyHiFxQ5sW /tmp/tmp.NWeUixx2nT ++ return 0 + client_pod=pxc-client-857d976497-pvdtf + wait_pod pxc-client-857d976497-pvdtf + local pod=pxc-client-857d976497-pvdtf + local max_retry=480 + local ns= ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo pxc-client-857d976497-pvdtf + local container= + set +o xtrace pod/pxc-client-857d976497-pvdtf condition met waiting for pod/pxc-client-857d976497-pvdtf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.o30MpYaJxV/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4.sql /tmp/tmp.o30MpYaJxV/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking xtrabackup' Checking xtrabackup ++ getSecretData my-cluster-secrets xtrabackup ++ local secretName=my-cluster-secrets ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.k0hlHLVFoE +++ mktemp ++ local LAST_ERR=/tmp/tmp.aia6gSujNg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.k0hlHLVFoE ++ cat /tmp/tmp.aia6gSujNg ++ rm /tmp/tmp.k0hlHLVFoE /tmp/tmp.aia6gSujNg ++ return 0 + secret_pass='{4Yju?sv)zUY4)]{8m[' ++ getSecretData internal-some-name xtrabackup ++ local secretName=internal-some-name ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.ilIr5wP8tE +++ mktemp ++ local LAST_ERR=/tmp/tmp.epwHQDnPRf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ilIr5wP8tE ++ cat /tmp/tmp.epwHQDnPRf ++ rm /tmp/tmp.ilIr5wP8tE /tmp/tmp.epwHQDnPRf ++ return 0 + int_secret_pass='{4Yju?sv)zUY4)]{8m[' + [[ -z {4Yju?sv)zUY4)]{8m[ ]] + [[ {4Yju?sv)zUY4)]{8m[ != \{\4\Y\j\u\?\s\v\)\z\U\Y\4\)\]\{\8\m\[ ]] + [[ xtrabackup != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ xtrabackup ]] + [[ '' =~ xtrabackup ]] + echo 'Running compare for xtrabackup' Running compare for xtrabackup + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''{4Yju?sv)zUY4)]{8m['\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''{4Yju?sv)zUY4)]{8m['\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''{4Yju?sv)zUY4)]{8m['\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''{4Yju?sv)zUY4)]{8m['\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3MdhS9EMYC +++ mktemp ++ local LAST_ERR=/tmp/tmp.ehdZOTMxol ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3MdhS9EMYC ++ cat /tmp/tmp.ehdZOTMxol ++ rm /tmp/tmp.3MdhS9EMYC /tmp/tmp.ehdZOTMxol ++ return 0 + client_pod=pxc-client-857d976497-pvdtf + wait_pod pxc-client-857d976497-pvdtf + local pod=pxc-client-857d976497-pvdtf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-pvdtf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-pvdtf condition met waiting for pod/pxc-client-857d976497-pvdtf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.o30MpYaJxV/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4.sql /tmp/tmp.o30MpYaJxV/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking monitor' Checking monitor ++ getSecretData my-cluster-secrets monitor ++ local secretName=my-cluster-secrets ++ local dataKey=monitor ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.mzJ7uWKlwV +++ mktemp ++ local LAST_ERR=/tmp/tmp.wjL411ch8S ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mzJ7uWKlwV ++ cat /tmp/tmp.wjL411ch8S ++ rm /tmp/tmp.mzJ7uWKlwV /tmp/tmp.wjL411ch8S ++ return 0 + secret_pass=monitor_password ++ getSecretData internal-some-name monitor ++ local secretName=internal-some-name ++ local dataKey=monitor ++ kubectl_bin get secrets/internal-some-name '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.3xbbNakLdx +++ mktemp ++ local LAST_ERR=/tmp/tmp.ONDrTyph5t ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3xbbNakLdx ++ cat /tmp/tmp.ONDrTyph5t ++ rm /tmp/tmp.3xbbNakLdx /tmp/tmp.ONDrTyph5t ++ return 0 + int_secret_pass=monitor_password + [[ -z monitor_password ]] + [[ monitor_password != \m\o\n\i\t\o\r\_\p\a\s\s\w\o\r\d ]] + [[ monitor != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ monitor ]] + [[ '' =~ monitor ]] + echo 'Running compare for monitor' Running compare for monitor + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local postfix= + local 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7L627ENoqb +++ mktemp ++ local LAST_ERR=/tmp/tmp.OprfSvRxtl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7L627ENoqb ++ cat /tmp/tmp.OprfSvRxtl ++ rm /tmp/tmp.7L627ENoqb /tmp/tmp.OprfSvRxtl ++ return 0 + client_pod=pxc-client-857d976497-pvdtf + wait_pod pxc-client-857d976497-pvdtf + local pod=pxc-client-857d976497-pvdtf + local max_retry=480 + local ns= ++ egrep '^(pxc|proxysql)$' ++ echo pxc-client-857d976497-pvdtf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-857d976497-pvdtf condition met waiting for pod/pxc-client-857d976497-pvdtf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.o30MpYaJxV/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4.sql /tmp/tmp.o30MpYaJxV/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking proxyadmin' Checking proxyadmin ++ getSecretData my-cluster-secrets proxyadmin ++ local secretName=my-cluster-secrets ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.Y1C00L2zGM +++ mktemp ++ local LAST_ERR=/tmp/tmp.s8wi0FyWVi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Y1C00L2zGM ++ cat /tmp/tmp.s8wi0FyWVi ++ rm /tmp/tmp.Y1C00L2zGM /tmp/tmp.s8wi0FyWVi ++ return 0 + secret_pass='AG,nVQI~(C+h^e&h12' ++ getSecretData internal-some-name proxyadmin ++ local secretName=internal-some-name ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.1QgktpdiSX +++ mktemp ++ local LAST_ERR=/tmp/tmp.lsnq8H3I6A ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1QgktpdiSX ++ cat /tmp/tmp.lsnq8H3I6A ++ rm /tmp/tmp.1QgktpdiSX /tmp/tmp.lsnq8H3I6A ++ return 0 + int_secret_pass='AG,nVQI~(C+h^e&h12' + [[ -z AG,nVQI~(C+h^e&h12 ]] + [[ AG,nVQI~(C+h^e&h12 != \A\G\,\n\V\Q\I\~\(\C\+\h\^\e\&\h\1\2 ]] + [[ proxyadmin != \p\r\o\x\y\a\d\m\i\n ]] + [[ proxyadmin == \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ proxyadmin ]] + [[ '' =~ proxyadmin ]] + echo 'Running compare for proxyadmin' Running compare for proxyadmin 
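
Annotation: the user loop above repeats one check per system account (root, xtrabackup, monitor, proxyadmin, operator, replication): read the password from the user-facing Secret and from the operator's internal copy, require both to be non-empty and identical, then prove the credential works by logging in. The core of that check, condensed from the trace (the empty_pwds/wrong_pwds accounting is paraphrased from the bracket tests shown above):

    getSecretData() {    # usage: getSecretData <secretName> <dataKey>
        kubectl get "secrets/$1" "--template={{.data.$2}}" | base64 --decode
    }

    empty_pwds=(); wrong_pwds=()
    for user in root xtrabackup monitor proxyadmin operator replication; do
        secret_pass=$(getSecretData my-cluster-secrets "$user")
        int_secret_pass=$(getSecretData internal-some-name "$user")
        if [[ -z $secret_pass ]]; then empty_pwds+=("$user"); fi
        if [[ $secret_pass != "$int_secret_pass" ]]; then wrong_pwds+=("$user"); fi
    done

proxyadmin is the one exception in the login step: as the next commands show, it is verified with compare_mysql_cmd_local against the ProxySQL admin interface on 127.0.0.1:6032 inside the some-name-proxysql-0 pod, rather than through the some-name-proxysql MySQL endpoint used for the other accounts.
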
+ compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''AG,nVQI~(C+h^e&h12'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''AG,nVQI~(C+h^e&h12'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''AG,nVQI~(C+h^e&h12'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''AG,nVQI~(C+h^e&h12'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.o30MpYaJxV/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-2.sql /tmp/tmp.o30MpYaJxV/select-2.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking operator' Checking operator ++ getSecretData my-cluster-secrets operator ++ local secretName=my-cluster-secrets ++ local dataKey=operator ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.zxe1ZAABWt +++ mktemp ++ local LAST_ERR=/tmp/tmp.7LAog5YHHv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zxe1ZAABWt ++ cat /tmp/tmp.7LAog5YHHv ++ rm /tmp/tmp.zxe1ZAABWt /tmp/tmp.7LAog5YHHv ++ return 0 + secret_pass='])c@,Fn?9Hwwt&uOU' ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.zmp5zDMSz6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.n7b4dfgk8O ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zmp5zDMSz6 ++ cat /tmp/tmp.n7b4dfgk8O ++ rm /tmp/tmp.zmp5zDMSz6 /tmp/tmp.n7b4dfgk8O ++ return 0 + int_secret_pass='])c@,Fn?9Hwwt&uOU' + [[ -z ])c@,Fn?9Hwwt&uOU ]] + [[ ])c@,Fn?9Hwwt&uOU != \]\)\c\@\,\F\n\?\9\H\w\w\t\&\u\O\U ]] + [[ operator != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ operator ]] + [[ '' =~ operator ]] + echo 'Running compare for operator' Running compare for operator + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''])c@,Fn?9Hwwt&uOU'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''])c@,Fn?9Hwwt&uOU'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''])c@,Fn?9Hwwt&uOU'\''' + 
local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''])c@,Fn?9Hwwt&uOU'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.o0bGvlKea2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.u6ArShXLld ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.o0bGvlKea2 ++ cat /tmp/tmp.u6ArShXLld ++ rm /tmp/tmp.o0bGvlKea2 /tmp/tmp.u6ArShXLld ++ return 0 + client_pod=pxc-client-857d976497-pvdtf + wait_pod pxc-client-857d976497-pvdtf + local pod=pxc-client-857d976497-pvdtf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-pvdtf ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-857d976497-pvdtf condition met waiting for pod/pxc-client-857d976497-pvdtf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.o30MpYaJxV/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4.sql /tmp/tmp.o30MpYaJxV/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking replication' Checking replication ++ getSecretData my-cluster-secrets replication ++ local secretName=my-cluster-secrets ++ local dataKey=replication ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.S6ttGpHJ02 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Q28TuZXHRA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.S6ttGpHJ02 ++ cat /tmp/tmp.Q28TuZXHRA ++ rm /tmp/tmp.S6ttGpHJ02 /tmp/tmp.Q28TuZXHRA ++ return 0 + secret_pass=')S!RPM#gT0V.P2au' ++ getSecretData internal-some-name replication ++ local secretName=internal-some-name ++ local dataKey=replication ++ kubectl_bin get secrets/internal-some-name '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.8AiFtQS5mt +++ mktemp ++ local LAST_ERR=/tmp/tmp.R5vtTvWFJR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8AiFtQS5mt ++ cat /tmp/tmp.R5vtTvWFJR ++ rm /tmp/tmp.8AiFtQS5mt /tmp/tmp.R5vtTvWFJR ++ return 0 + int_secret_pass=')S!RPM#gT0V.P2au' + [[ -z )S!RPM#gT0V.P2au ]] + [[ )S!RPM#gT0V.P2au != \)\S\!\R\P\M\#\g\T\0\V\.\P\2\a\u ]] + [[ replication != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ replication ]] + [[ '' =~ replication ]] + echo 'Running compare for replication' Running compare for replication + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\'')S!RPM#gT0V.P2au'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\'')S!RPM#gT0V.P2au'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\'')S!RPM#gT0V.P2au'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\'')S!RPM#gT0V.P2au'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.V9La9pNdJf +++ mktemp ++ local LAST_ERR=/tmp/tmp.5YjWOowyp9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.V9La9pNdJf ++ cat /tmp/tmp.5YjWOowyp9 ++ rm /tmp/tmp.V9La9pNdJf /tmp/tmp.5YjWOowyp9 ++ return 0 + client_pod=pxc-client-857d976497-pvdtf + wait_pod pxc-client-857d976497-pvdtf + local pod=pxc-client-857d976497-pvdtf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-pvdtf ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-857d976497-pvdtf condition met waiting for pod/pxc-client-857d976497-pvdtf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.o30MpYaJxV/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4.sql /tmp/tmp.o30MpYaJxV/select-4.sql + [[ -n '' ]] + [[ -n '' ]] + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.3VCLGdkaAP ++ mktemp + local LAST_ERR=/tmp/tmp.DghNXsHahZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3VCLGdkaAP secret/my-cluster-secrets patched + cat /tmp/tmp.DghNXsHahZ + rm /tmp/tmp.3VCLGdkaAP /tmp/tmp.DghNXsHahZ + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WpoKdeSHAB +++ mktemp ++ local LAST_ERR=/tmp/tmp.5Naaz3aJsV 
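compare_mysql_cmd, traced here for the root user, is a golden-file check: it runs a query through the some-name-proxysql service from the pxc-client pod, saves the output, and diffs it against a canned .sql file, preferring a -57 variant when the image tag matches 5\.7. A rough sketch under those assumptions (paths shortened, pod lookup and quoting simplified; IMAGE_PXC is an assumed variable standing in for the image tag):

compare_mysql_cmd() {
    local command_id=$1 command=$2 uri=$3
    local expected="e2e-tests/users/compare/${command_id}.sql"
    # prefer the 5.7-specific golden file when testing a 5.7 image
    if [[ ${IMAGE_PXC} =~ 5\.7 && -f ${expected%.sql}-57.sql ]]; then
        expected="${expected%.sql}-57.sql"
    fi
    local client_pod
    client_pod=$(kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}')
    kubectl exec "${client_pod}" -- bash -c "echo '${command}' | mysql -sN ${uri}" >"/tmp/${command_id}.sql"
    diff -u "${expected}" "/tmp/${command_id}.sql"
}

In the log, a non-empty diff fails the step, and an empty capture file is also treated as an error (the '[' '!' -s ... ']' test).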
++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WpoKdeSHAB ++ cat /tmp/tmp.5Naaz3aJsV ++ rm /tmp/tmp.WpoKdeSHAB /tmp/tmp.5Naaz3aJsV ++ return 0 + client_pod=pxc-client-857d976497-pvdtf + wait_pod pxc-client-857d976497-pvdtf + local pod=pxc-client-857d976497-pvdtf + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo pxc-client-857d976497-pvdtf ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-pvdtf condition met waiting for pod/pxc-client-857d976497-pvdtf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.o30MpYaJxV/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4.sql /tmp/tmp.o30MpYaJxV/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.TECUidfNA4 ++ mktemp + local LAST_ERR=/tmp/tmp.XbYu6MT01C + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TECUidfNA4 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.XbYu6MT01C + rm /tmp/tmp.TECUidfNA4 /tmp/tmp.XbYu6MT01C + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gXjjJjU5fq +++ mktemp ++ local LAST_ERR=/tmp/tmp.TNicqpUczN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gXjjJjU5fq ++ cat /tmp/tmp.TNicqpUczN ++ rm /tmp/tmp.gXjjJjU5fq /tmp/tmp.TNicqpUczN ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xorePMXVY4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Isr6ASwxo0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xorePMXVY4 ++ cat /tmp/tmp.Isr6ASwxo0 ++ rm /tmp/tmp.xorePMXVY4 /tmp/tmp.Isr6ASwxo0 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.6isSlpv34v ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Q1Nk4UiRyI +++++ 
local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.6isSlpv34v +++++ cat /tmp/tmp.Q1Nk4UiRyI +++++ rm /tmp/tmp.6isSlpv34v /tmp/tmp.Q1Nk4UiRyI +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.47U9HQPlzm ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.SZpjOyJhQ6 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.47U9HQPlzm +++++ cat /tmp/tmp.SZpjOyJhQ6 +++++ rm /tmp/tmp.47U9HQPlzm /tmp/tmp.SZpjOyJhQ6 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GINhh5dMbS +++ mktemp ++ local LAST_ERR=/tmp/tmp.dV8017ZHhr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GINhh5dMbS ++ cat /tmp/tmp.dV8017ZHhr ++ rm /tmp/tmp.GINhh5dMbS /tmp/tmp.dV8017ZHhr ++ return 0 + [[ 3 == \3 ]] + echo + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.LVRgE7kfsG ++ mktemp + local LAST_ERR=/tmp/tmp.EMYrkx6Fmu + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LVRgE7kfsG secret/my-cluster-secrets patched + cat /tmp/tmp.EMYrkx6Fmu + rm /tmp/tmp.LVRgE7kfsG /tmp/tmp.EMYrkx6Fmu + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GCYyc4dtIo +++ mktemp ++ local LAST_ERR=/tmp/tmp.GJQt0mkehB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GCYyc4dtIo ++ cat /tmp/tmp.GJQt0mkehB ++ rm /tmp/tmp.GCYyc4dtIo /tmp/tmp.GJQt0mkehB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
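The deeply nested (+++++) block above is get_proxy_engine/get_proxy deciding which proxy fronts the cluster: it reads .spec.haproxy.enabled and .spec.proxysql.enabled from the pxc resource and derives the service name, which in turn selects the .status field to poll. Reconstructed from the trace (the haproxy branch is an assumption, since this run only ever takes the proxysql path):

get_proxy() {
    local target_cluster=$1
    if [[ $(kubectl get pxc "${target_cluster}" -o 'jsonpath={.spec.haproxy.enabled}') == "true" ]]; then
        echo "${target_cluster}-haproxy"   # assumed; not exercised in this run
    elif [[ $(kubectl get pxc "${target_cluster}" -o 'jsonpath={.spec.proxysql.enabled}') == "true" ]]; then
        echo "${target_cluster}-proxysql"
    fi
}

wait_cluster_consistency then compares .status.proxysql.ready against the expected proxy size (3 here, right after the scale-up).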
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6oRIyMvKTK +++ mktemp ++ local LAST_ERR=/tmp/tmp.f9FMBbAPrV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6oRIyMvKTK ++ cat /tmp/tmp.f9FMBbAPrV ++ rm /tmp/tmp.6oRIyMvKTK /tmp/tmp.f9FMBbAPrV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5O8os2YbfP +++ mktemp ++ local LAST_ERR=/tmp/tmp.D6TumSlXRj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5O8os2YbfP ++ cat /tmp/tmp.D6TumSlXRj ++ rm /tmp/tmp.5O8os2YbfP /tmp/tmp.D6TumSlXRj ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.90vJXsF6WI +++ mktemp ++ local LAST_ERR=/tmp/tmp.Yrs6FkAy3I ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.90vJXsF6WI ++ cat /tmp/tmp.Yrs6FkAy3I ++ rm /tmp/tmp.90vJXsF6WI /tmp/tmp.Yrs6FkAy3I ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.mCW2urSxtf ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.D85GDaPjWs +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.mCW2urSxtf +++++ cat /tmp/tmp.D85GDaPjWs +++++ rm /tmp/tmp.mCW2urSxtf /tmp/tmp.D85GDaPjWs +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.jrWwsCSe5k ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.rl8JVziFQv +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.jrWwsCSe5k +++++ cat /tmp/tmp.rl8JVziFQv +++++ rm /tmp/tmp.jrWwsCSe5k /tmp/tmp.rl8JVziFQv +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Vbv8j09xet +++ mktemp ++ local LAST_ERR=/tmp/tmp.8SfkiSeZvw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Vbv8j09xet ++ cat /tmp/tmp.8SfkiSeZvw ++ rm /tmp/tmp.Vbv8j09xet /tmp/tmp.8SfkiSeZvw ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local 
command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.o30MpYaJxV/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-2.sql /tmp/tmp.o30MpYaJxV/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.o30MpYaJxV/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-2.sql /tmp/tmp.o30MpYaJxV/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.o30MpYaJxV/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-2.sql /tmp/tmp.o30MpYaJxV/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.2qk25fKtMs ++ mktemp + local LAST_ERR=/tmp/tmp.gHvTC7y0Vg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2qk25fKtMs perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.gHvTC7y0Vg + rm /tmp/tmp.2qk25fKtMs /tmp/tmp.gHvTC7y0Vg + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.bBwUODOJE0 ++ mktemp + local LAST_ERR=/tmp/tmp.42UGBJS27n + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.bBwUODOJE0 secret/my-cluster-secrets patched + cat /tmp/tmp.42UGBJS27n + rm /tmp/tmp.bBwUODOJE0 /tmp/tmp.42UGBJS27n + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.svhte6Rnks +++ mktemp ++ local LAST_ERR=/tmp/tmp.d4DaXd4v0J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.svhte6Rnks ++ cat /tmp/tmp.d4DaXd4v0J ++ rm /tmp/tmp.svhte6Rnks /tmp/tmp.d4DaXd4v0J ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FFNJfdjJZc +++ mktemp ++ local LAST_ERR=/tmp/tmp.Hz9uVCz8wV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FFNJfdjJZc ++ cat /tmp/tmp.Hz9uVCz8wV ++ rm /tmp/tmp.FFNJfdjJZc /tmp/tmp.Hz9uVCz8wV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
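Each per-user step boils down to patch_secret: one data key of my-cluster-secrets is replaced with a base64 value (dGVzdC1wYXNzd29yZA== decodes to test-password), and the operator is expected to propagate it. A reconstruction of the helper as traced, plus an illustrative call:

patch_secret() {
    local secret=$1
    local key=$2
    local value=$3   # must already be base64-encoded
    kubectl patch secret "${secret}" -p="{\"data\":{\"${key}\": \"${value}\"}}"
}

# equivalent of the xtrabackup step above
patch_secret my-cluster-secrets xtrabackup "$(echo -n 'test-password' | base64)"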
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BflDMPQz46 +++ mktemp ++ local LAST_ERR=/tmp/tmp.mDIalAvrFL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BflDMPQz46 ++ cat /tmp/tmp.mDIalAvrFL ++ rm /tmp/tmp.BflDMPQz46 /tmp/tmp.mDIalAvrFL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dxoFZPSMEr +++ mktemp ++ local LAST_ERR=/tmp/tmp.03VMplzvaE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dxoFZPSMEr ++ cat /tmp/tmp.03VMplzvaE ++ rm /tmp/tmp.dxoFZPSMEr /tmp/tmp.03VMplzvaE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.I4HzDXBLEZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.PvTnxQajyN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.I4HzDXBLEZ ++ cat /tmp/tmp.PvTnxQajyN ++ rm /tmp/tmp.I4HzDXBLEZ /tmp/tmp.PvTnxQajyN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Q1bk68FWwF +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ss3u1Bdu4d ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Q1bk68FWwF ++ cat /tmp/tmp.Ss3u1Bdu4d ++ rm /tmp/tmp.Q1bk68FWwF /tmp/tmp.Ss3u1Bdu4d ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GdEmiGZwCJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.AwKIgWyL3N ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GdEmiGZwCJ ++ cat /tmp/tmp.AwKIgWyL3N ++ rm /tmp/tmp.GdEmiGZwCJ /tmp/tmp.AwKIgWyL3N ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.40l1pqRWXi +++ mktemp ++ local LAST_ERR=/tmp/tmp.l2m93vKIAP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.40l1pqRWXi ++ cat /tmp/tmp.l2m93vKIAP ++ rm /tmp/tmp.40l1pqRWXi /tmp/tmp.l2m93vKIAP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rN6SvJj5DK +++ mktemp ++ local LAST_ERR=/tmp/tmp.9jHlmrQZGg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rN6SvJj5DK ++ cat /tmp/tmp.9jHlmrQZGg ++ rm /tmp/tmp.rN6SvJj5DK /tmp/tmp.9jHlmrQZGg ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.umKPy2eLEz +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZOi7Ans4tk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.umKPy2eLEz ++ cat /tmp/tmp.ZOi7Ans4tk ++ rm /tmp/tmp.umKPy2eLEz /tmp/tmp.ZOi7Ans4tk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.y7KewyK23n +++ mktemp ++ local LAST_ERR=/tmp/tmp.aHB6mawSqH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.y7KewyK23n ++ cat /tmp/tmp.aHB6mawSqH ++ rm /tmp/tmp.y7KewyK23n /tmp/tmp.aHB6mawSqH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.185LsNpyDF +++ mktemp ++ local LAST_ERR=/tmp/tmp.sc8F6z2avP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.185LsNpyDF ++ cat /tmp/tmp.sc8F6z2avP ++ rm /tmp/tmp.185LsNpyDF /tmp/tmp.sc8F6z2avP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VFandpBlmH +++ mktemp ++ local LAST_ERR=/tmp/tmp.FDD9ysSsxy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VFandpBlmH ++ cat /tmp/tmp.FDD9ysSsxy ++ rm /tmp/tmp.VFandpBlmH /tmp/tmp.FDD9ysSsxy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mGgdHhEt4R +++ mktemp ++ local LAST_ERR=/tmp/tmp.wcqgM2yxuK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mGgdHhEt4R ++ cat /tmp/tmp.wcqgM2yxuK ++ rm /tmp/tmp.mGgdHhEt4R /tmp/tmp.wcqgM2yxuK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.S4OAum0gAn +++ mktemp ++ local LAST_ERR=/tmp/tmp.N4Xs7FO5Ly ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.S4OAum0gAn ++ cat /tmp/tmp.N4Xs7FO5Ly ++ rm /tmp/tmp.S4OAum0gAn /tmp/tmp.N4Xs7FO5Ly ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1mw5uQvXMm +++ mktemp ++ local LAST_ERR=/tmp/tmp.hWOb9zcfoO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1mw5uQvXMm ++ cat /tmp/tmp.hWOb9zcfoO ++ rm /tmp/tmp.1mw5uQvXMm /tmp/tmp.hWOb9zcfoO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oXURzTBxJR +++ mktemp ++ local LAST_ERR=/tmp/tmp.52G57JEQOA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oXURzTBxJR ++ cat /tmp/tmp.52G57JEQOA ++ rm /tmp/tmp.oXURzTBxJR /tmp/tmp.52G57JEQOA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5icVh7yuG3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.iFGLIqodM5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5icVh7yuG3 ++ cat /tmp/tmp.iFGLIqodM5 ++ rm /tmp/tmp.5icVh7yuG3 /tmp/tmp.iFGLIqodM5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.B2rPoPYZKD +++ mktemp ++ local LAST_ERR=/tmp/tmp.v0IaCn1YfZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.B2rPoPYZKD ++ cat /tmp/tmp.v0IaCn1YfZ ++ rm /tmp/tmp.B2rPoPYZKD /tmp/tmp.v0IaCn1YfZ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UmT2ds5Cca +++ mktemp ++ local LAST_ERR=/tmp/tmp.5C4xoKOSKA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UmT2ds5Cca ++ cat /tmp/tmp.5C4xoKOSKA ++ rm /tmp/tmp.UmT2ds5Cca /tmp/tmp.5C4xoKOSKA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
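The run of near-identical blocks here is wait_cluster_consistency polling .status.state every 5 seconds, up to 300 iterations; each '.' in the log is one poll while the cluster reports initializing after the xtrabackup password change. A condensed sketch of the loop (the jsonpath fields, the 7-second initial sleep, and the limits come from the trace; the until-loop shape is simplified):

wait_cluster_consistency() {
    local cluster_name=$1 cluster_size=$2 proxy_size=$3
    local i=0 max=300
    sleep 7
    until [[ $(kubectl get pxc "${cluster_name}" -o 'jsonpath={.status.state}') == "ready" &&
             $(kubectl get pxc "${cluster_name}" -o 'jsonpath={.status.pxc.ready}') == "${cluster_size}" &&
             $(kubectl get pxc "${cluster_name}" -o 'jsonpath={.status.proxysql.ready}') == "${proxy_size}" ]]; do
        if [[ ${i} -ge ${max} ]]; then
            echo "pxc/${cluster_name} never became ready" >&2
            return 1
        fi
        echo -n .
        sleep 5
        i=$((i + 1))
    done
}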
.+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jT9I8OWcpF +++ mktemp ++ local LAST_ERR=/tmp/tmp.0YGIZ9yPbD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jT9I8OWcpF ++ cat /tmp/tmp.0YGIZ9yPbD ++ rm /tmp/tmp.jT9I8OWcpF /tmp/tmp.0YGIZ9yPbD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8TwFsRM9Jt +++ mktemp ++ local LAST_ERR=/tmp/tmp.3qXqyuw51M ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8TwFsRM9Jt ++ cat /tmp/tmp.3qXqyuw51M ++ rm /tmp/tmp.8TwFsRM9Jt /tmp/tmp.3qXqyuw51M ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vPw0HivtZS +++ mktemp ++ local LAST_ERR=/tmp/tmp.MaPQT8vCpP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vPw0HivtZS ++ cat /tmp/tmp.MaPQT8vCpP ++ rm /tmp/tmp.vPw0HivtZS /tmp/tmp.MaPQT8vCpP ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IRaOII44T3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Q69ubIVgiW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IRaOII44T3 ++ cat /tmp/tmp.Q69ubIVgiW ++ rm /tmp/tmp.IRaOII44T3 /tmp/tmp.Q69ubIVgiW ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.QiYY2VVkcw ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.PZ1gEUf0Zw +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.QiYY2VVkcw +++++ cat /tmp/tmp.PZ1gEUf0Zw +++++ rm /tmp/tmp.QiYY2VVkcw /tmp/tmp.PZ1gEUf0Zw +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.U2lwd1ggAn ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.weGqjoCApn +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.U2lwd1ggAn +++++ cat /tmp/tmp.weGqjoCApn +++++ rm /tmp/tmp.U2lwd1ggAn /tmp/tmp.weGqjoCApn +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local 
LAST_OUT=/tmp/tmp.wm4DqVaaMo
+++ mktemp
++ local LAST_ERR=/tmp/tmp.jDClq6FUID
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.wm4DqVaaMo
++ cat /tmp/tmp.jDClq6FUID
++ rm /tmp/tmp.wm4DqVaaMo /tmp/tmp.jDClq6FUID
++ return 0
+ [[ 2 == \2 ]]
+ echo
+ compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc
+ local command_id=select-3
+ local 'command=SHOW DATABASES;'
+ local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\'''
+ local pod=some-name-pxc-0
+ local postfix=
+ local container_name=pxc
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-3.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]]
+ run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc
+ local 'command=SHOW DATABASES;'
+ local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\'''
+ local pod=some-name-pxc-0
+ local container_name=pxc
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.o30MpYaJxV/select-3.sql ']'
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-3.sql /tmp/tmp.o30MpYaJxV/select-3.sql
+ desc 'test monitor'
+ set +o xtrace
-----------------------------------------------------------------------------------
test monitor
-----------------------------------------------------------------------------------
+ patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA==
+ local secret=my-cluster-secrets
+ local key=monitor
+ local value=dGVzdC1wYXNzd29yZA==
+ kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}'
++ mktemp
+ local LAST_OUT=/tmp/tmp.NaIo26LPow
++ mktemp
+ local LAST_ERR=/tmp/tmp.pdcBKFeGyB
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}'
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.NaIo26LPow
secret/my-cluster-secrets patched
+ cat /tmp/tmp.pdcBKFeGyB
+ rm /tmp/tmp.NaIo26LPow /tmp/tmp.pdcBKFeGyB
+ return 0
+ wait_for_password_propagation my-cluster-secrets monitor
+ local secret=my-cluster-secrets
+ local user=monitor
+ local max_retry=240
++ getSecretData my-cluster-secrets root
++ local secretName=my-cluster-secrets
++ local dataKey=root
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.0NmRKai0KZ
+++ mktemp
++ local LAST_ERR=/tmp/tmp.wudXSnpA2n
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.0NmRKai0KZ
++ cat /tmp/tmp.wudXSnpA2n
++ rm /tmp/tmp.0NmRKai0KZ /tmp/tmp.wudXSnpA2n
++ return 0
+ local root_pass=test-password
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
+ echo 'Skipping dual password feature doesn'\''t work for 5.7. PXC 5.7 doesn'\''t support it!'
Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it!
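wait_for_password_propagation is a no-op on this run: dual-password support (keeping the previous password valid while the new one propagates) is a MySQL 8.0 feature, so for a 5.7 image the helper just logs and returns. Its gate, reconstructed from the trace (IMAGE_PXC stands in for however the script carries the image tag; the 8.0+ polling body is not exercised here and is left as a comment):

# inside wait_for_password_propagation: bail out early on 5.7 images
if [[ ${IMAGE_PXC} =~ 5\.7 ]]; then
    echo "Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it!"
    return
fi
# on 8.0+ the helper would poll (max_retry=240) until the new password is live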
+ return + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FxpDODw1Eu +++ mktemp ++ local LAST_ERR=/tmp/tmp.5TUioyHBc3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FxpDODw1Eu ++ cat /tmp/tmp.5TUioyHBc3 ++ rm /tmp/tmp.FxpDODw1Eu /tmp/tmp.5TUioyHBc3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xZ7TjYugoG +++ mktemp ++ local LAST_ERR=/tmp/tmp.dtMxYFlT5s ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xZ7TjYugoG ++ cat /tmp/tmp.dtMxYFlT5s ++ rm /tmp/tmp.xZ7TjYugoG /tmp/tmp.dtMxYFlT5s ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ljFzuhcb3c +++ mktemp ++ local LAST_ERR=/tmp/tmp.4wQASJZPes ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ljFzuhcb3c ++ cat /tmp/tmp.4wQASJZPes ++ rm /tmp/tmp.ljFzuhcb3c /tmp/tmp.4wQASJZPes ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lo4uCzEAR4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.bt0k1Sj4Dn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lo4uCzEAR4 ++ cat /tmp/tmp.bt0k1Sj4Dn ++ rm /tmp/tmp.lo4uCzEAR4 /tmp/tmp.bt0k1Sj4Dn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.x50CrdVJ8d +++ mktemp ++ local LAST_ERR=/tmp/tmp.v8y6qbU4y1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.x50CrdVJ8d ++ cat /tmp/tmp.v8y6qbU4y1 ++ rm /tmp/tmp.x50CrdVJ8d /tmp/tmp.v8y6qbU4y1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.S23yY0hPTg +++ mktemp ++ local LAST_ERR=/tmp/tmp.1CrRJmNCyt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.S23yY0hPTg ++ cat /tmp/tmp.1CrRJmNCyt ++ rm /tmp/tmp.S23yY0hPTg /tmp/tmp.1CrRJmNCyt ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bTEwiQYEbJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.tWcCQelAxV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bTEwiQYEbJ ++ cat /tmp/tmp.tWcCQelAxV ++ rm /tmp/tmp.bTEwiQYEbJ /tmp/tmp.tWcCQelAxV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zJroA1OrlF +++ mktemp ++ local LAST_ERR=/tmp/tmp.knD65hbZvT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zJroA1OrlF ++ cat /tmp/tmp.knD65hbZvT ++ rm /tmp/tmp.zJroA1OrlF /tmp/tmp.knD65hbZvT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xb2GpzFBkc +++ mktemp ++ local LAST_ERR=/tmp/tmp.RaOuQmExt5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xb2GpzFBkc ++ cat /tmp/tmp.RaOuQmExt5 ++ rm /tmp/tmp.xb2GpzFBkc /tmp/tmp.RaOuQmExt5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
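Nearly every command in this log runs through kubectl_bin, which is what produces the constant mktemp/LAST_OUT/LAST_ERR/seq 0 2 noise: stdout and stderr are captured to temp files and the call is retried up to three times before giving up. Reconstructed from the trace (the back-off between failed attempts is an assumption; every call in this run succeeds on the first try, so the retry path never shows):

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"${LAST_OUT}" 2>"${LAST_ERR}"
        exit_status=$?
        set -e
        if [ "${exit_status}" != 0 ]; then
            sleep 1   # assumed back-off; not visible in this log
            continue
        fi
        break
    done
    # replay captured output, clean up, and propagate the last exit code
    cat "${LAST_OUT}"
    cat "${LAST_ERR}"
    rm "${LAST_OUT}" "${LAST_ERR}"
    return "${exit_status}"
}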
.+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nXf6a7Pl9P +++ mktemp ++ local LAST_ERR=/tmp/tmp.deeeTuCA8h ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nXf6a7Pl9P ++ cat /tmp/tmp.deeeTuCA8h ++ rm /tmp/tmp.nXf6a7Pl9P /tmp/tmp.deeeTuCA8h ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1kpbPWxU5f +++ mktemp ++ local LAST_ERR=/tmp/tmp.kBBVGJRYJO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1kpbPWxU5f ++ cat /tmp/tmp.kBBVGJRYJO ++ rm /tmp/tmp.1kpbPWxU5f /tmp/tmp.kBBVGJRYJO ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.S1qr9GaKOg ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.wpsd6kQxSn +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.S1qr9GaKOg +++++ cat /tmp/tmp.wpsd6kQxSn +++++ rm /tmp/tmp.S1qr9GaKOg /tmp/tmp.wpsd6kQxSn +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.hq7e4SdKHj ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.UvUclGxAMa +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.hq7e4SdKHj +++++ cat /tmp/tmp.UvUclGxAMa +++++ rm /tmp/tmp.hq7e4SdKHj /tmp/tmp.UvUclGxAMa +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4hjaSX2Ycv +++ mktemp ++ local LAST_ERR=/tmp/tmp.P3zxX8l6yj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4hjaSX2Ycv ++ cat /tmp/tmp.P3zxX8l6yj ++ rm /tmp/tmp.4hjaSX2Ycv /tmp/tmp.P3zxX8l6yj ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW 
TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hTV8GAo2d5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.xrQnK2XREw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hTV8GAo2d5 ++ cat /tmp/tmp.xrQnK2XREw ++ rm /tmp/tmp.hTV8GAo2d5 /tmp/tmp.xrQnK2XREw ++ return 0 + client_pod=pxc-client-857d976497-pvdtf + wait_pod pxc-client-857d976497-pvdtf + local pod=pxc-client-857d976497-pvdtf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-pvdtf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-pvdtf condition met waiting for pod/pxc-client-857d976497-pvdtf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.o30MpYaJxV/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4.sql /tmp/tmp.o30MpYaJxV/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.BL1rLLFVbq ++ mktemp + local LAST_ERR=/tmp/tmp.Yhh0MZ0pXh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BL1rLLFVbq secret/my-cluster-secrets patched + cat /tmp/tmp.Yhh0MZ0pXh + rm /tmp/tmp.BL1rLLFVbq /tmp/tmp.Yhh0MZ0pXh + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.R6fx5Uos5S +++ mktemp ++ local LAST_ERR=/tmp/tmp.XXq1ixhLfH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.R6fx5Uos5S ++ cat /tmp/tmp.XXq1ixhLfH ++ rm /tmp/tmp.R6fx5Uos5S /tmp/tmp.XXq1ixhLfH ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.W3seB5IKyZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.VWAVf5dzao ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 
'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.W3seB5IKyZ ++ cat /tmp/tmp.VWAVf5dzao ++ rm /tmp/tmp.W3seB5IKyZ /tmp/tmp.VWAVf5dzao ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.v2rdeGADUl ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Q3KkVFO8WZ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.v2rdeGADUl +++++ cat /tmp/tmp.Q3KkVFO8WZ +++++ rm /tmp/tmp.v2rdeGADUl /tmp/tmp.Q3KkVFO8WZ +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.kdhTh0WKUY ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.WID3bRr0R5 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.kdhTh0WKUY +++++ cat /tmp/tmp.WID3bRr0R5 +++++ rm /tmp/tmp.kdhTh0WKUY /tmp/tmp.WID3bRr0R5 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dkPzWwJ9EK +++ mktemp ++ local LAST_ERR=/tmp/tmp.2Tf9VuZgNI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dkPzWwJ9EK ++ cat /tmp/tmp.2Tf9VuZgNI ++ rm /tmp/tmp.dkPzWwJ9EK /tmp/tmp.2Tf9VuZgNI ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ui7GvSD7SY +++ mktemp ++ local LAST_ERR=/tmp/tmp.WYjowPpq2S ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ui7GvSD7SY ++ cat /tmp/tmp.WYjowPpq2S ++ rm /tmp/tmp.Ui7GvSD7SY /tmp/tmp.WYjowPpq2S ++ return 0 + client_pod=pxc-client-857d976497-pvdtf + wait_pod pxc-client-857d976497-pvdtf + local 
pod=pxc-client-857d976497-pvdtf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-pvdtf ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-857d976497-pvdtf condition met waiting for pod/pxc-client-857d976497-pvdtf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.o30MpYaJxV/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4.sql /tmp/tmp.o30MpYaJxV/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.lg7DmPWiq2 ++ mktemp + local LAST_ERR=/tmp/tmp.zPPXWAv8CR + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lg7DmPWiq2 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.zPPXWAv8CR + rm /tmp/tmp.lg7DmPWiq2 /tmp/tmp.zPPXWAv8CR + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iF5Ei0Pgnt +++ mktemp ++ local LAST_ERR=/tmp/tmp.fMyjkK6NHi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iF5Ei0Pgnt ++ cat /tmp/tmp.fMyjkK6NHi ++ rm /tmp/tmp.iF5Ei0Pgnt /tmp/tmp.fMyjkK6NHi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BtH9Rer7O2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.0cBUAXLXtH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BtH9Rer7O2 ++ cat /tmp/tmp.0cBUAXLXtH ++ rm /tmp/tmp.BtH9Rer7O2 /tmp/tmp.0cBUAXLXtH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yRbQifZ0XL +++ mktemp ++ local LAST_ERR=/tmp/tmp.MmDYUGdOTl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yRbQifZ0XL ++ cat /tmp/tmp.MmDYUGdOTl ++ rm /tmp/tmp.yRbQifZ0XL /tmp/tmp.MmDYUGdOTl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
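The 'change secret name' step repoints the cluster at a brand-new Secret object instead of mutating a single key, which forces the operator to reconcile every system user at once; the long initializing wait that follows is that rolling update. The step, extracted from the trace:

# switch the cluster to a different user-secrets object and wait for reconcile
kubectl patch pxc some-name --type merge \
    --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}'
sleep 30
wait_cluster_consistency some-name 3 2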
.+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 [xtrace for poll iterations 2-13 elided: the same kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' check repeats every 5 seconds, each returning initializing; only the mktemp file names differ] + [[ initializing == \r\e\a\d\y ]] + echo -n .
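The dotted wait output above and below is wait_cluster_consistency polling the custom resource until the operator reports it ready. A sketch of the loop as reconstructed from the trace; the retry arithmetic matches the [[ N -ge 300 ]] guards, while the give-up branch is simplified:

wait_cluster_consistency() {
    local cluster_name=$1 cluster_size=$2 proxy_size=$3
    local i=0 max=300
    sleep 7
    echo -n "waiting for pxc/${cluster_name} to be ready"
    # poll every 5s; 300 iterations is roughly 25 minutes before giving up
    until [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.state}') == "ready" ]]; do
        echo -n .
        sleep 5
        [[ $i -ge $max ]] && { echo "cluster never became ready"; return 1; }
        let i+=1
    done
    # once state is ready, the pod counts are verified too
    # (the {.status.pxc.ready} and {.status.<proxy>.ready} checks that follow in the trace)
}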
.+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PKB2kGYVUe +++ mktemp ++ local LAST_ERR=/tmp/tmp.OkhvV2RBUe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PKB2kGYVUe ++ cat /tmp/tmp.OkhvV2RBUe ++ rm /tmp/tmp.PKB2kGYVUe /tmp/tmp.OkhvV2RBUe ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yGhI1NjzF5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.6zFVFKepIU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yGhI1NjzF5 ++ cat /tmp/tmp.6zFVFKepIU ++ rm /tmp/tmp.yGhI1NjzF5 /tmp/tmp.6zFVFKepIU ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DUd77QPod1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.wciLAii9H5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DUd77QPod1 ++ cat /tmp/tmp.wciLAii9H5 ++ rm /tmp/tmp.DUd77QPod1 /tmp/tmp.wciLAii9H5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hmB5OUyiFi +++ mktemp ++ local LAST_ERR=/tmp/tmp.hvye2CYShO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hmB5OUyiFi ++ cat /tmp/tmp.hvye2CYShO ++ rm /tmp/tmp.hmB5OUyiFi /tmp/tmp.hvye2CYShO ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vmH6ml17qe +++ mktemp ++ local LAST_ERR=/tmp/tmp.aN90Wx5Pxi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vmH6ml17qe ++ cat /tmp/tmp.aN90Wx5Pxi ++ rm /tmp/tmp.vmH6ml17qe /tmp/tmp.aN90Wx5Pxi ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.xhISPRFIvN ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ZCIZe9Rkyq +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.xhISPRFIvN +++++ cat /tmp/tmp.ZCIZe9Rkyq +++++ rm /tmp/tmp.xhISPRFIvN /tmp/tmp.ZCIZe9Rkyq +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.mKvi9t5B0X ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.KCBtVG8iYL +++++ local exit_status=0 ++++++ seq 0 2 
+++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.mKvi9t5B0X +++++ cat /tmp/tmp.KCBtVG8iYL +++++ rm /tmp/tmp.mKvi9t5B0X /tmp/tmp.KCBtVG8iYL +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vgtv7OUkPZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.Gw2gHzH27n ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vgtv7OUkPZ ++ cat /tmp/tmp.Gw2gHzH27n ++ rm /tmp/tmp.vgtv7OUkPZ /tmp/tmp.Gw2gHzH27n ++ return 0 + [[ 2 == \2 ]] + echo + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.nfzbzFVuNl ++ mktemp + local LAST_ERR=/tmp/tmp.AVjKiHqP0c + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.nfzbzFVuNl secret/my-cluster-secrets-2 patched + cat /tmp/tmp.AVjKiHqP0c + rm /tmp/tmp.nfzbzFVuNl /tmp/tmp.AVjKiHqP0c + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dk6vI05jLb +++ mktemp ++ local LAST_ERR=/tmp/tmp.uPazwFmwWV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dk6vI05jLb ++ cat /tmp/tmp.uPazwFmwWV ++ rm /tmp/tmp.dk6vI05jLb /tmp/tmp.uPazwFmwWV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JtIvDymTy1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.qe38lgDxSN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JtIvDymTy1 ++ cat /tmp/tmp.qe38lgDxSN ++ rm /tmp/tmp.JtIvDymTy1 /tmp/tmp.qe38lgDxSN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
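patch_secret, as traced in the "test new operator" step above, is just a merge patch of one base64 value into the Secret's data map; a sketch under the assumption that the function does no more than the trace shows:

patch_secret() {
    local secret=$1 key=$2 value=$3   # value must already be base64-encoded
    kubectl patch secret "$secret" -p="{\"data\":{\"$key\": \"$value\"}}"
}

# usage, matching the trace above:
newpass=test-password2
patch_secret my-cluster-secrets-2 operator "$(echo -n "$newpass" | base64)"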
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GFRiq2x9h5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.AqHzh8creG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GFRiq2x9h5 ++ cat /tmp/tmp.AqHzh8creG ++ rm /tmp/tmp.GFRiq2x9h5 /tmp/tmp.AqHzh8creG ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aBxGU5t3hH +++ mktemp ++ local LAST_ERR=/tmp/tmp.fUEsPo2y2J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aBxGU5t3hH ++ cat /tmp/tmp.fUEsPo2y2J ++ rm /tmp/tmp.aBxGU5t3hH /tmp/tmp.fUEsPo2y2J ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.8EOKL7N2rw ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.O6yYAbmYOf +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.8EOKL7N2rw +++++ cat /tmp/tmp.O6yYAbmYOf +++++ rm /tmp/tmp.8EOKL7N2rw /tmp/tmp.O6yYAbmYOf +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.XKc7koRtTn ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.V9otTMCOrF +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.XKc7koRtTn +++++ cat /tmp/tmp.V9otTMCOrF +++++ rm /tmp/tmp.XKc7koRtTn /tmp/tmp.V9otTMCOrF +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MzR9iidRNI +++ mktemp ++ local LAST_ERR=/tmp/tmp.GnCbNqYCAm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MzR9iidRNI ++ cat /tmp/tmp.GnCbNqYCAm ++ rm /tmp/tmp.MzR9iidRNI /tmp/tmp.GnCbNqYCAm ++ return 0 + [[ 2 == \2 ]] + echo + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW 
TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3tLR7iv0WS +++ mktemp ++ local LAST_ERR=/tmp/tmp.7m6HAh3g1O ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3tLR7iv0WS ++ cat /tmp/tmp.7m6HAh3g1O ++ rm /tmp/tmp.3tLR7iv0WS /tmp/tmp.7m6HAh3g1O ++ return 0 + client_pod=pxc-client-857d976497-pvdtf + wait_pod pxc-client-857d976497-pvdtf + local pod=pxc-client-857d976497-pvdtf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-pvdtf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-pvdtf condition met waiting for pod/pxc-client-857d976497-pvdtf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.o30MpYaJxV/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4.sql /tmp/tmp.o30MpYaJxV/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.7bx6zgd0dr +++ mktemp ++ local LAST_ERR=/tmp/tmp.pS7oHyl0KB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7bx6zgd0dr ++ cat /tmp/tmp.pS7oHyl0KB ++ rm /tmp/tmp.7bx6zgd0dr /tmp/tmp.pS7oHyl0KB ++ return 0 + newpass='0<7#tcHm6&Vmxh}i&&x' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''0<7#tcHm6&Vmxh}i&&x'\'';' '-h some-name-pxc -uroot -p'\''0<7#tcHm6&Vmxh}i&&x'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''0<7#tcHm6&Vmxh}i&&x'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''0<7#tcHm6&Vmxh}i&&x'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.O79riaCrHb +++ mktemp ++ local LAST_ERR=/tmp/tmp.2PZyvPT33f ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.O79riaCrHb ++ cat /tmp/tmp.2PZyvPT33f ++ rm /tmp/tmp.O79riaCrHb /tmp/tmp.2PZyvPT33f ++ return 0 + client_pod=pxc-client-857d976497-pvdtf + wait_pod pxc-client-857d976497-pvdtf + local pod=pxc-client-857d976497-pvdtf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-pvdtf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-pvdtf condition met waiting for pod/pxc-client-857d976497-pvdtf to become ReadyDefaulted container "pxc-client" out of: 
pxc-client, backup .Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''0<7#tcHm6&Vmxh}i&&x'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''0<7#tcHm6&Vmxh}i&&x'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''0<7#tcHm6&Vmxh}i&&x'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''0<7#tcHm6&Vmxh}i&&x'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6IldxyKZMq +++ mktemp ++ local LAST_ERR=/tmp/tmp.dcatn59LgC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6IldxyKZMq ++ cat /tmp/tmp.dcatn59LgC ++ rm /tmp/tmp.6IldxyKZMq /tmp/tmp.dcatn59LgC ++ return 0 + client_pod=pxc-client-857d976497-pvdtf + wait_pod pxc-client-857d976497-pvdtf + local pod=pxc-client-857d976497-pvdtf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-pvdtf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-pvdtf condition met waiting for pod/pxc-client-857d976497-pvdtf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.o30MpYaJxV/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4.sql /tmp/tmp.o30MpYaJxV/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.fU3S4JcTL4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZBa9iDFpi9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fU3S4JcTL4 ++ cat /tmp/tmp.ZBa9iDFpi9 ++ rm /tmp/tmp.fU3S4JcTL4 /tmp/tmp.ZBa9iDFpi9 ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.jUgVnLflm7 ++ mktemp + local LAST_ERR=/tmp/tmp.wMzlOUGWvJ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jUgVnLflm7 secret/my-cluster-secrets-2 configured + cat /tmp/tmp.wMzlOUGWvJ Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
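getSecretData reads one key back out of a Secret and decodes it; the trace uses it above to fetch the root password from my-cluster-secrets-2 and the operator password from internal-some-name. A minimal sketch, lifted almost verbatim from the xtrace:

getSecretData() {
    local secretName=$1 dataKey=$2
    # Secret values are stored base64-encoded, so decode on the way out
    kubectl get "secrets/$secretName" --template="{{.data.$dataKey}}" | base64 --decode
}

pass=$(getSecretData internal-some-name operator)   # "test-password2" at this point in the test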
+ rm /tmp/tmp.jUgVnLflm7 /tmp/tmp.wMzlOUGWvJ + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZJ9s1SHGyS +++ mktemp ++ local LAST_ERR=/tmp/tmp.Np08luezQn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZJ9s1SHGyS ++ cat /tmp/tmp.Np08luezQn ++ rm /tmp/tmp.ZJ9s1SHGyS /tmp/tmp.Np08luezQn ++ return 0 + client_pod=pxc-client-857d976497-pvdtf + wait_pod pxc-client-857d976497-pvdtf + local pod=pxc-client-857d976497-pvdtf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-pvdtf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-pvdtf condition met waiting for pod/pxc-client-857d976497-pvdtf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.o30MpYaJxV/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-4.sql /tmp/tmp.o30MpYaJxV/select-4.sql + newpass=test-password2 ++ base64 ++ echo -n test-password2 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + local LAST_OUT=/tmp/tmp.1REpdTmYgK + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2202-c5e2e681#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-17540~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' ++ mktemp + local LAST_ERR=/tmp/tmp.MJuxjeQWKj + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/conf/some-name.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1REpdTmYgK perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.MJuxjeQWKj + rm /tmp/tmp.1REpdTmYgK /tmp/tmp.MJuxjeQWKj + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.us1Pkq7uI8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.cDJaThd9zt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.us1Pkq7uI8 ++ cat /tmp/tmp.cDJaThd9zt ++ rm /tmp/tmp.us1Pkq7uI8 /tmp/tmp.cDJaThd9zt ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
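apply_config pipes the test's CR manifest through a series of sed image substitutions before kubectl apply, which is where the parallel sed invocations above come from. A condensed sketch that chains the same substitutions sequentially; the image variable names are assumptions, since the trace only shows the already-expanded values:

cat_config() {
    cat "$1" \
        | sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
        | sed -e "s#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}\$#image: $IMAGE_PXC#" \
        | sed -e "s#image:.*-proxysql\$#image: $IMAGE_PROXY#" \
        | sed -e "s#image:.*-haproxy\$#image: $IMAGE_HAPROXY#" \
        | sed -e "s~minio-service.#namespace~minio-service.$NAMESPACE~" \
        | sed -e 's#apply:.*#apply: Never#'
        # ...plus the -pmm, -backup, -init and -logcollector substitutions seen in the trace
}

apply_config() {
    cat_config "$1" | kubectl apply -f -
}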
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 [xtrace for poll iterations 0-21 elided: the identical kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' check repeats every 5 seconds while the cluster reconfigures from proxysql to haproxy, each returning initializing; only the mktemp file names differ] + [[ initializing == \r\e\a\d\y ]] + echo -n .
.+ sleep 5 + [[ 22 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cQONhdylID +++ mktemp ++ local LAST_ERR=/tmp/tmp.BVw4orRH0J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cQONhdylID ++ cat /tmp/tmp.BVw4orRH0J ++ rm /tmp/tmp.cQONhdylID /tmp/tmp.BVw4orRH0J ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ANUD1DUN6Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.QwElQvhqjX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ANUD1DUN6Y ++ cat /tmp/tmp.QwElQvhqjX ++ rm /tmp/tmp.ANUD1DUN6Y /tmp/tmp.QwElQvhqjX ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.a2dYxoFTfa ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.PiaSedwF1L +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.a2dYxoFTfa +++++ cat /tmp/tmp.PiaSedwF1L +++++ rm /tmp/tmp.a2dYxoFTfa /tmp/tmp.PiaSedwF1L +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9UZ7oSYBBB +++ mktemp ++ local LAST_ERR=/tmp/tmp.4lpxV0l61h ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9UZ7oSYBBB ++ cat /tmp/tmp.4lpxV0l61h ++ rm /tmp/tmp.9UZ7oSYBBB /tmp/tmp.4lpxV0l61h ++ return 0 + [[ 2 == \3 ]] + echo -n . .+ sleep 5 + [[ 23 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.afv7M8xxxu +++ mktemp ++ local LAST_ERR=/tmp/tmp.4aaQ1xBCyh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.afv7M8xxxu ++ cat /tmp/tmp.4aaQ1xBCyh ++ rm /tmp/tmp.afv7M8xxxu /tmp/tmp.4aaQ1xBCyh ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 24 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.U4Bz7hIaOe +++ mktemp ++ local LAST_ERR=/tmp/tmp.WWc0rCe9x5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.U4Bz7hIaOe ++ cat /tmp/tmp.WWc0rCe9x5 ++ rm /tmp/tmp.U4Bz7hIaOe /tmp/tmp.WWc0rCe9x5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
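The [[ 2 == \3 ]] comparison above is the interesting branch: the CR already reports state ready, but only 2 of the expected 3 HAProxy pods are up, so the wait loop keeps polling. In sketch form, the post-ready verification reconstructed from the trace; the helper name cluster_is_consistent is invented here for illustration, and the retry plumbing is simplified:

cluster_is_consistent() {
    local cluster_name=$1 cluster_size=$2 proxy_size=$3
    [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.state}') == "ready" ]] || return 1
    [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.pxc.ready}') == "$cluster_size" ]] || return 1
    # proxy engine is haproxy after the re-apply above, proxysql before it
    local proxy
    proxy=$(get_proxy_engine "$cluster_name")
    [[ $(kubectl get pxc "$cluster_name" -o "jsonpath={.status.${proxy}.ready}") == "$proxy_size" ]] || return 1
}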
.+ sleep 5 + [[ 25 -ge 300 ]] + let i+=1 [xtrace for poll iterations 25-42 elided: the cluster stays initializing while the remaining HAProxy pod comes up; the same 5-second status check repeats, differing only in mktemp file names] + [[ initializing == \r\e\a\d\y ]] + echo -n .
.+ sleep 5 + [[ 43 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tJ9IROsj7Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.YHijSFmaqQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tJ9IROsj7Y ++ cat /tmp/tmp.YHijSFmaqQ ++ rm /tmp/tmp.tJ9IROsj7Y /tmp/tmp.YHijSFmaqQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 44 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ym288UtXfG +++ mktemp ++ local LAST_ERR=/tmp/tmp.ilI3tI4JFX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ym288UtXfG ++ cat /tmp/tmp.ilI3tI4JFX ++ rm /tmp/tmp.ym288UtXfG /tmp/tmp.ilI3tI4JFX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 45 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.67VdePBl3q +++ mktemp ++ local LAST_ERR=/tmp/tmp.JZZbjBGohT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.67VdePBl3q ++ cat /tmp/tmp.JZZbjBGohT ++ rm /tmp/tmp.67VdePBl3q /tmp/tmp.JZZbjBGohT ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KvgEz0nnqU +++ mktemp ++ local LAST_ERR=/tmp/tmp.A4aMXxEzYi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KvgEz0nnqU ++ cat /tmp/tmp.A4aMXxEzYi ++ rm /tmp/tmp.KvgEz0nnqU /tmp/tmp.A4aMXxEzYi ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.fFfMdlVIzk ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.EO9oI2bOhh +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.fFfMdlVIzk +++++ cat /tmp/tmp.EO9oI2bOhh +++++ rm /tmp/tmp.fFfMdlVIzk /tmp/tmp.EO9oI2bOhh +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mUKZcLYUMj +++ mktemp ++ local LAST_ERR=/tmp/tmp.iLCKwCEGTM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mUKZcLYUMj ++ cat /tmp/tmp.iLCKwCEGTM ++ rm /tmp/tmp.mUKZcLYUMj /tmp/tmp.iLCKwCEGTM ++ return 0 + [[ 3 == \3 ]] + echo + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + check_generation 2 haproxy some-name + local generation=2 + local container=haproxy + local 
cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XWAKI0EQuM +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZXcmolRer3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XWAKI0EQuM ++ cat /tmp/tmp.ZXcmolRer3 ++ rm /tmp/tmp.XWAKI0EQuM /tmp/tmp.ZXcmolRer3 ++ return 0 + current_generation=2 + [[ 2 != \2 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.nARTk3FSEZ ++ mktemp + local LAST_ERR=/tmp/tmp.I3lGsEAIec + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.nARTk3FSEZ secret/my-cluster-secrets patched + cat /tmp/tmp.I3lGsEAIec + rm /tmp/tmp.nARTk3FSEZ /tmp/tmp.I3lGsEAIec + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fyMz19n8X2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.A6CVSghX6j ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fyMz19n8X2 ++ cat /tmp/tmp.A6CVSghX6j ++ rm /tmp/tmp.fyMz19n8X2 /tmp/tmp.A6CVSghX6j ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.koDB9Xz4ww +++ mktemp ++ local LAST_ERR=/tmp/tmp.ojN0va5QP3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.koDB9Xz4ww ++ cat /tmp/tmp.ojN0va5QP3 ++ rm /tmp/tmp.koDB9Xz4ww /tmp/tmp.ojN0va5QP3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.igEcCkrmP0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.WzN77vaXtx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.igEcCkrmP0 ++ cat /tmp/tmp.WzN77vaXtx ++ rm /tmp/tmp.igEcCkrmP0 /tmp/tmp.WzN77vaXtx ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
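[editor's note: check_generation and patch_secret, traced just above, reduce to two kubectl one-liners. The secret value is written base64-encoded; dGVzdC1wYXNzd29yZDI= decodes to "test-password2", the password the later mysql login uses. Equivalent standalone commands (a sketch, not the harness source):

  # assert the haproxy StatefulSet is still at generation 2
  [[ $(kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}') == 2 ]]

  # rotate the monitor password in the cluster secret
  value=$(echo -n 'test-password2' | base64)        # dGVzdC1wYXNzd29yZDI=
  kubectl patch secret my-cluster-secrets -p "{\"data\":{\"monitor\": \"${value}\"}}"
]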
[iterations 2-6 condensed: five more identical probes at 5 s intervals — state "initializing" each time, one "." printed per attempt; the per-iteration trace is identical to iterations 0-1 above, only the /tmp/tmp.* names differ]
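[editor's note: the poll being traced here has this shape — a simplified sketch of wait_cluster_consistency reconstructed from the trace (kubectl_bin is the retry wrapper sketched earlier; 300 polls at 5 s apiece gives the loop a ~25-minute budget):

  wait_cluster_consistency() {
      local cluster_name=$1 cluster_size=$2 proxy_size=$3
      local i=0 max=300
      sleep 7
      echo -n "waiting for pxc/${cluster_name} to be ready"
      until [[ $(kubectl_bin get pxc "$cluster_name" -o 'jsonpath={.status.state}') == "ready" ]]; do
          echo -n .
          sleep 5
          [[ $i -ge $max ]] && return 1      # timed out
          let i+=1
      done
      # once "ready", the expected replica counts must match too
      [[ $(kubectl_bin get pxc "$cluster_name" -o 'jsonpath={.status.pxc.ready}') == "$cluster_size" ]]
      [[ $(kubectl_bin get pxc "$cluster_name" -o 'jsonpath={.status.haproxy.ready}') == "$proxy_size" ]]
  }
]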
.+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yPrfQm7YPU +++ mktemp ++ local LAST_ERR=/tmp/tmp.NPpC9w7jaj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yPrfQm7YPU ++ cat /tmp/tmp.NPpC9w7jaj ++ rm /tmp/tmp.yPrfQm7YPU /tmp/tmp.NPpC9w7jaj ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pJoUq8aaJp +++ mktemp ++ local LAST_ERR=/tmp/tmp.LYnw89rtPs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pJoUq8aaJp ++ cat /tmp/tmp.LYnw89rtPs ++ rm /tmp/tmp.pJoUq8aaJp /tmp/tmp.LYnw89rtPs ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.qLRqSBX7Tt ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.SAz5l1dlN4 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.qLRqSBX7Tt +++++ cat /tmp/tmp.SAz5l1dlN4 +++++ rm /tmp/tmp.qLRqSBX7Tt /tmp/tmp.SAz5l1dlN4 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AU1xZwGHFz +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZmjQcw1XNC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AU1xZwGHFz ++ cat /tmp/tmp.ZmjQcw1XNC ++ rm /tmp/tmp.AU1xZwGHFz /tmp/tmp.ZmjQcw1XNC ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-3-57.sql ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KpeM61V6kM +++ mktemp ++ local LAST_ERR=/tmp/tmp.fFFmlpkSZK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 
'!=' 0 ']' ++ break ++ cat /tmp/tmp.KpeM61V6kM ++ cat /tmp/tmp.fFFmlpkSZK ++ rm /tmp/tmp.KpeM61V6kM /tmp/tmp.fFFmlpkSZK ++ return 0
+ client_pod=pxc-client-857d976497-pvdtf + wait_pod pxc-client-857d976497-pvdtf + local pod=pxc-client-857d976497-pvdtf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-pvdtf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace
pod/pxc-client-857d976497-pvdtf condition met
waiting for pod/pxc-client-857d976497-pvdtf to become Ready
Defaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace + '[' '!' -s /tmp/tmp.o30MpYaJxV/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2202/e2e-tests/users/compare/select-3.sql /tmp/tmp.o30MpYaJxV/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
+ check_generation 3 haproxy some-name + local generation=3 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QA5G6HGmqX +++ mktemp ++ local LAST_ERR=/tmp/tmp.PWMnNSy9GE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QA5G6HGmqX ++ cat /tmp/tmp.PWMnNSy9GE ++ rm /tmp/tmp.QA5G6HGmqX /tmp/tmp.PWMnNSy9GE ++ return 0 + current_generation=3 + [[ 3 != \3 ]]
+ destroy users-17540 + local namespace=users-17540 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace
----------------------------------------------------------------------------------- destroy cluster/operator and all other resources -----------------------------------------------------------------------------------
+ '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + grep -v 'get backup status: Job.batch' + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + sort -u + tee /tmp/tmp.o30MpYaJxV/operator.log +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.6Lda5wSlbk +++ mktemp ++ local LAST_ERR=/tmp/tmp.EzeSL65cvd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6Lda5wSlbk ++ cat /tmp/tmp.EzeSL65cvd ++ rm /tmp/tmp.6Lda5wSlbk /tmp/tmp.EzeSL65cvd ++ return 0
+ kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-d84c66648-5h9dt ++ mktemp + local LAST_OUT=/tmp/tmp.PdnzNJMY5l ++ mktemp + local LAST_ERR=/tmp/tmp.feX4jdUzF3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-d84c66648-5h9dt + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PdnzNJMY5l + cat /tmp/tmp.feX4jdUzF3 + rm /tmp/tmp.PdnzNJMY5l /tmp/tmp.feX4jdUzF3 + return 0
[unparseable fragment omitted: the operator log opened with a multi-line go-cmp object diff that the filter pipeline shredded into stray braces and "// 16 identical fields" markers]
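[editor's note: the destroy step above wires the operator log through a filter pipeline before saving it; roughly (a sketch assembled from the traced commands — $operator_pod and $tmp_dir stand in for the values resolved in the trace):

  kubectl logs -n pxc-operator "$operator_pod" \
      | grep -v level=info \
      | grep -v 'the object has been modified' \
      | grep -v 'get backup status: Job.batch' \
      | /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
      | sort -u \
      | tee "$tmp_dir/operator.log"

The sort -u is why the entries that follow appear deduplicated and in lexicographic (here effectively chronological) order rather than emission order, and it is what shredded the multi-line go-cmp diff noted just above.]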
2025-10-09T00:37:07.988Z INFO setup Manager starting up {"gitCommit": "c5e2e681577ec5135cf7e4f3a99405aadfebe3aa", "gitBranch": "PR-2202-c5e2e681", "buildTime": "2025-10-08T22:13:19Z", "goVersion": "go1.24.8", "os": "linux", "arch": "amd64"}
2025-10-09T00:37:07.988Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.13-gke.1008000"}
2025-10-09T00:37:07.991Z INFO setup Registering Components.
2025-10-09T00:37:08.995Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"}
2025-10-09T00:37:08.995Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"}
2025-10-09T00:37:08.995Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false}
2025-10-09T00:37:08.995Z INFO controller-runtime.metrics Starting metrics server
2025-10-09T00:37:08.995Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"}
2025-10-09T00:37:08.995Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443}
2025-10-09T00:37:08.995Z INFO controller-runtime.webhook Starting webhook server
2025-10-09T00:37:08.995Z INFO setup Starting the Cmd.
2025-10-09T00:37:08.995Z INFO starting server {"name": "health probe", "addr": "[::]:8081"}
2025-10-09T00:37:09.196Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com...
2025-10-09T00:37:09.229Z DEBUG events percona-xtradb-cluster-operator-d84c66648-5h9dt_dd2ded0a-1831-4d8c-a2eb-7efed4f362f1 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"5484686e-552f-4397-89b4-fe5d0d45ec0b","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1759970229222143009"}, "reason": "LeaderElection"}
2025-10-09T00:37:09.229Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com
2025-10-09T00:37:09.230Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"}
2025-10-09T00:37:09.230Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"}
2025-10-09T00:37:09.230Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.Secret"}
2025-10-09T00:37:09.230Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"}
2025-10-09T00:37:09.330Z INFO Starting Controller {"controller": "pxcbackup-controller"}
2025-10-09T00:37:09.330Z INFO Starting Controller {"controller": "pxc-controller"}
2025-10-09T00:37:09.330Z INFO Starting Controller {"controller": "pxcrestore-controller"}
2025-10-09T00:37:09.330Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1}
2025-10-09T00:37:09.330Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1}
2025-10-09T00:37:09.330Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1}
2025-10-09T00:37:50.990Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "0e993fd1-a61c-4637-8171-ed522800647b", "version": "1.19.0"}
2025-10-09T00:37:51.250Z INFO User secrets
updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "0e993fd1-a61c-4637-8171-ed522800647b", "secrets": "my-cluster-secrets"} 2025-10-09T00:37:51.470Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "0e993fd1-a61c-4637-8171-ed522800647b", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-10-09T00:37:51.490Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "0e993fd1-a61c-4637-8171-ed522800647b", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-10-09T00:37:52.067Z ERROR Reconciler error {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "0e993fd1-a61c-4637-8171-ed522800647b", "error": "failed to deploy proxysql: updatePod for proxysql: reconcile config: reconcile autotune config: create or update configmap: configmaps \"auto-some-name-pxc\" already exists", "errorVerbose": "configmaps \"auto-some-name-pxc\" already exists\ncreate or update configmap\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileAutotuneConfigMap\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:92\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:25\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nreconcile autotune 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:27\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nreconcile 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:54\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nupdatePod for proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:578\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nfailed to deploy 
proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:596\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-09T00:37:52.563Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "500d324c-a830-4dac-baab-4d5b0eec7d5d", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-09T00:37:52.745Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "500d324c-a830-4dac-baab-4d5b0eec7d5d", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-09T00:37:52.876Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "500d324c-a830-4dac-baab-4d5b0eec7d5d", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-09T00:37:53.015Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "500d324c-a830-4dac-baab-4d5b0eec7d5d", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-09T00:37:53.240Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "500d324c-a830-4dac-baab-4d5b0eec7d5d", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-09T00:37:53.531Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "500d324c-a830-4dac-baab-4d5b0eec7d5d", "object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-09T00:37:54.477Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "2a337c1a-f5fb-4eb1-8197-f047c8ef6f3b", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-10-09T00:37:54.502Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "2a337c1a-f5fb-4eb1-8197-f047c8ef6f3b", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-10-09T00:39:11.060Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-17540", "name": 
"some-name", "reconcileID": "f535b464-e576-4693-b243-d53e9c3dab42", "user": "operator"} 2025-10-09T00:39:11.113Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "f535b464-e576-4693-b243-d53e9c3dab42", "user": "monitor"} 2025-10-09T00:39:11.154Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "f535b464-e576-4693-b243-d53e9c3dab42"} 2025-10-09T00:39:11.196Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "f535b464-e576-4693-b243-d53e9c3dab42", "user": "xtrabackup"} 2025-10-09T00:39:11.237Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "f535b464-e576-4693-b243-d53e9c3dab42"} 2025-10-09T00:39:11.247Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "f535b464-e576-4693-b243-d53e9c3dab42", "err": "get primary pxc pod: not found"} 2025-10-09T00:39:15.986Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "9e599dc4-4c6e-47a8-b0ec-8bab6f9e4963", "err": "get primary pxc pod: not found"} 2025-10-09T00:39:21.143Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3069e45c-514b-471a-ad90-2206855e74ca", "err": "get primary pxc pod: not found"} 2025-10-09T00:41:34.198Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "5599680a-9b97-4591-952d-3acfbd753e2f", "user": "root"} 2025-10-09T00:41:34.234Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "5599680a-9b97-4591-952d-3acfbd753e2f", "user": "replication"} 2025-10-09T00:41:34.277Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "5599680a-9b97-4591-952d-3acfbd753e2f", "new version": "5.7.44-48-57"} 2025-10-09T00:41:36.085Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "5599680a-9b97-4591-952d-3acfbd753e2f"} 2025-10-09T00:41:40.936Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "b34dbab1-8d4a-4f83-ac21-aaa98fa13a07"} 2025-10-09T00:41:46.135Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "e6f52616-6ae4-4ecf-a973-e50f74195387"} 2025-10-09T00:41:51.347Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "5e9d9f09-6797-47f5-a901-a308a66e61e8"} 2025-10-09T00:41:56.764Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3a8b651f-9d0d-4cd1-8b04-d12ff0fd5fda"} 2025-10-09T00:42:01.845Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "51c7306e-133e-45c7-af06-ea5458568e4e"} 2025-10-09T00:42:07.303Z DEBUG PXC 
users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "c060bfbc-b0c9-45d9-b674-4a73ed1729fc"} 2025-10-09T00:42:12.939Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "02d4df7e-ec0c-4075-aeeb-d03a8bedc256"} 2025-10-09T00:42:18.035Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "332298a9-e339-4509-9b48-36bb11718ee9"} 2025-10-09T00:42:23.489Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "f47ac2d7-f26c-49ad-8f5b-97a78531496d"} 2025-10-09T00:42:28.351Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "7ea36871-6dc9-4ed7-b484-d8ee2845a823"} 2025-10-09T00:42:33.762Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "99cbece5-088b-4cbb-a1f9-ffd8e1e94c4a"} 2025-10-09T00:42:39.656Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "c80996dc-059d-49e3-91a0-8e6c4595386a"} 2025-10-09T00:42:45.036Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "508e4f46-a858-49b4-a0af-f5ab1b0746fd"} 2025-10-09T00:42:50.077Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "57f39dc5-1df0-4b93-b503-7b5eac7db302"} 2025-10-09T00:42:55.534Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55508a31-c0bf-4ba0-bd82-2483b3e7a85f"} 2025-10-09T00:43:00.844Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "fbc86f5f-ae3f-4bb7-af4b-08cf2e792aea"} 2025-10-09T00:43:06.141Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "f164c4bb-c03c-470b-82f4-63519e9e2649"} 2025-10-09T00:43:11.354Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "cacfb1ea-3e9c-49d1-bb9d-3bcde6e4a589"} 2025-10-09T00:43:16.678Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "98f348d0-404c-4bf1-901c-bf7fe325fe2e"} 2025-10-09T00:43:21.603Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "830ccffc-be90-4365-a5e1-8b5f04778066", "user": "root"} 2025-10-09T00:43:21.614Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "830ccffc-be90-4365-a5e1-8b5f04778066", "user": "root"} 2025-10-09T00:43:21.636Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "830ccffc-be90-4365-a5e1-8b5f04778066", "secret": "some-name-mysql-init", "user": "root"} 2025-10-09T00:43:22.329Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": 
"some-name", "reconcileID": "fe96a05a-53f5-4e9f-8ce1-2639b8d7e013"} 2025-10-09T00:43:23.755Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "830ccffc-be90-4365-a5e1-8b5f04778066"} 2025-10-09T00:43:23.777Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "830ccffc-be90-4365-a5e1-8b5f04778066", "user": "root"} 2025-10-09T00:43:25.335Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "830ccffc-be90-4365-a5e1-8b5f04778066"} 2025-10-09T00:43:27.158Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "44ea9799-99c9-43ab-adfb-421cb1bd48a3"} 2025-10-09T00:43:32.557Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "d9412e22-63b8-4d2c-9d73-9c5d069344f5"} 2025-10-09T00:43:38.335Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "a6ac131f-a272-42d7-a561-842ae217815a"} 2025-10-09T00:43:42.836Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "e235bf16-6d02-471d-a94b-98392dd129fa"} 2025-10-09T00:43:44.702Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "a5de4587-75f4-47b0-8108-7714b116bea9", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:43:45.022Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "a5de4587-75f4-47b0-8108-7714b116bea9", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:43:47.452Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "a5de4587-75f4-47b0-8108-7714b116bea9", "error": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-09T00:44:08.302Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "dd09518c-de39-4e16-a438-f00b26b217af", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster 
(with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-09T00:44:12.116Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "69adfea0-a1a0-4388-8073-d6fccd32eb43", "user": "proxyadmin"} 2025-10-09T00:44:12.116Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "69adfea0-a1a0-4388-8073-d6fccd32eb43", "user": "proxyadmin"} 2025-10-09T00:44:12.160Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "69adfea0-a1a0-4388-8073-d6fccd32eb43", "user": "proxyadmin"} 2025-10-09T00:44:12.197Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "69adfea0-a1a0-4388-8073-d6fccd32eb43", "user": "proxyadmin"} 2025-10-09T00:44:12.197Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "69adfea0-a1a0-4388-8073-d6fccd32eb43", "last-applied-secret": "f1735c8096a3a5d5b00a3b693894feb293cd61b71c6cb43f38ecdb60f1076bb5"} 2025-10-09T00:44:12.201Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "69adfea0-a1a0-4388-8073-d6fccd32eb43", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:44:12.779Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "1398f205-c780-4070-a07f-bbb9d8e1d504", "error": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-2\" not found / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR 1045 (28000): ProxySQL 
Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:509) : Galera hostgroup retrieval failed. \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-2\" not found / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:509) : Galera hostgroup retrieval failed. \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-09T00:44:20.642Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "bf88c166-d636-4dab-acdf-d76d441b3651", "err": "get primary pxc pod: not found"} 2025-10-09T00:44:45.423Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "14a3a84a-9028-4ed7-8862-49191dc11f95"} 2025-10-09T00:44:50.439Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3e3323aa-036d-4085-8208-8df1e90ae0d5"} 2025-10-09T00:44:55.525Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "fac3edf5-794b-4591-81fc-8c6696a5ec97"} 2025-10-09T00:44:56.770Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "73b0a6e4-b515-42d2-9d52-068a611940c5", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:44:56.831Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "73b0a6e4-b515-42d2-9d52-068a611940c5", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:44:58.691Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "73b0a6e4-b515-42d2-9d52-068a611940c5"} 2025-10-09T00:45:03.686Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "ccd1f943-1f6a-4309-bd1d-eb8864f4c314", "user": "xtrabackup"} 2025-10-09T00:45:03.696Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "ccd1f943-1f6a-4309-bd1d-eb8864f4c314", "user": 
"xtrabackup"} 2025-10-09T00:45:03.712Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "ccd1f943-1f6a-4309-bd1d-eb8864f4c314", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-10-09T00:45:03.732Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "ccd1f943-1f6a-4309-bd1d-eb8864f4c314", "user": "xtrabackup"} 2025-10-09T00:45:03.732Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "ccd1f943-1f6a-4309-bd1d-eb8864f4c314", "last-applied-secret": "9d8ede7b19f9f6e091c2cf0785bbedfbdc057fed246258adb98ee80e556cb82a"} 2025-10-09T00:45:03.735Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "ccd1f943-1f6a-4309-bd1d-eb8864f4c314", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:45:06.479Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "ccd1f943-1f6a-4309-bd1d-eb8864f4c314"} 2025-10-09T00:46:48.865Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "75b23355-2999-4191-95ff-97eb06bd2dde", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.56.208.60:33062: connect: connection refused"} 2025-10-09T00:46:54.027Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "9f533b00-72df-4be3-9059-c7eb1678e6fa", "primary name": "some-name-pxc-0.some-name-pxc.users-17540.svc.cluster.local"} 2025-10-09T00:46:59.178Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "6eb50908-cb6f-460a-b4bb-de5c8140f8f0", "primary name": "some-name-pxc-0.some-name-pxc.users-17540.svc.cluster.local"} 2025-10-09T00:47:04.380Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "edd3bbb0-c556-42c7-890a-7f59d95d8ab9", "primary name": "some-name-pxc-0.some-name-pxc.users-17540.svc.cluster.local"} 2025-10-09T00:47:09.559Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "c6294e9b-9683-4ed0-bd4b-80e002a54ded", "primary name": "some-name-pxc-0.some-name-pxc.users-17540.svc.cluster.local"} 2025-10-09T00:47:14.705Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "cf84387c-7011-4788-95cf-ec146f256643", "primary name": "some-name-pxc-0.some-name-pxc.users-17540.svc.cluster.local"} 2025-10-09T00:47:19.852Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3954cec7-c6f2-4ae9-b5c3-e6148b140656", "primary name": "some-name-pxc-0.some-name-pxc.users-17540.svc.cluster.local"} 2025-10-09T00:47:25.000Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "60e17be0-7693-41ec-9fc0-9dc4e9026b38", "primary name": "some-name-pxc-0.some-name-pxc.users-17540.svc.cluster.local"} 2025-10-09T00:47:32.669Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "589806fb-52db-421b-b238-ad9ad1ec1556"} 2025-10-09T00:47:38.407Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "be70c6ee-04f6-4b0f-94c7-8b0aa01719cc"} 2025-10-09T00:47:39.836Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "db9e3ae2-4b63-43cb-baa6-aa1e423be9ed", "user": "monitor"} 2025-10-09T00:47:39.847Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "db9e3ae2-4b63-43cb-baa6-aa1e423be9ed", "user": "monitor"} 2025-10-09T00:47:39.893Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "db9e3ae2-4b63-43cb-baa6-aa1e423be9ed", "secret": "some-name-mysql-init", "user": "monitor"} 2025-10-09T00:47:39.911Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "db9e3ae2-4b63-43cb-baa6-aa1e423be9ed", "user": "monitor"} 2025-10-09T00:47:39.953Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "db9e3ae2-4b63-43cb-baa6-aa1e423be9ed", "user": "monitor"} 2025-10-09T00:47:39.953Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "db9e3ae2-4b63-43cb-baa6-aa1e423be9ed", "last-applied-secret": "832bedbd46d12ffe9b3d3b05e4a0a0093f93e6493edc4f463b30a11f2656e5fd"} 2025-10-09T00:47:39.959Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "db9e3ae2-4b63-43cb-baa6-aa1e423be9ed", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:47:42.527Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "db9e3ae2-4b63-43cb-baa6-aa1e423be9ed", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / 
\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-09T00:48:44.286Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "e9f0bd77-2beb-419a-a3f0-8368d91544af"} 2025-10-09T00:48:49.398Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "c30fa07c-8081-4729-a57b-0167b33252d0"} 2025-10-09T00:48:54.715Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "d66212aa-2a0c-4079-b5f6-e095325b2ea8"} 2025-10-09T00:49:00.041Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "98255101-086d-4202-984b-0efb607b4485"} 2025-10-09T00:49:05.030Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3145bca1-1893-44e0-8dda-6f9867201180"} 2025-10-09T00:49:05.442Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "7656d4cf-bf4f-4ead-bf7f-65a190c11d4d", "user": "operator"} 2025-10-09T00:49:05.452Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "7656d4cf-bf4f-4ead-bf7f-65a190c11d4d", "user": "operator"} 2025-10-09T00:49:05.484Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "7656d4cf-bf4f-4ead-bf7f-65a190c11d4d", "secret": "some-name-mysql-init", "user": "operator"} 2025-10-09T00:49:05.510Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "7656d4cf-bf4f-4ead-bf7f-65a190c11d4d", "user": "operator"} 2025-10-09T00:49:05.510Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "7656d4cf-bf4f-4ead-bf7f-65a190c11d4d", "last-applied-secret": "3977eaf4779597539b358ec9ff000baa5c8fba9fc016a26c650c5f1a3bcf06b7"} 2025-10-09T00:49:05.516Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "7656d4cf-bf4f-4ead-bf7f-65a190c11d4d", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:49:09.378Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "7656d4cf-bf4f-4ead-bf7f-65a190c11d4d", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' 
(using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' 
(using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"}
2025-10-09T00:49:28.133Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "76a10a5e-ebbb-4941-972d-876c61d37cec"}
2025-10-09T00:49:32.680Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "5c0dd127-4df7-4a70-ae5f-57fe9666b8e2"}
2025-10-09T00:49:38.782Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "16a48f10-0e2b-4fd4-a712-3f1c9b5c7ff4"}
2025-10-09T00:49:43.100Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "59a64f21-c69c-4044-9f76-a5b7acf8bc4a"}
2025-10-09T00:49:49.147Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "secrets": "my-cluster-secrets-2"}
2025-10-09T00:49:49.147Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "user": "root"}
2025-10-09T00:49:49.162Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "user": "root"}
2025-10-09T00:49:49.186Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "secret": "some-name-mysql-init", "user": "root"}
2025-10-09T00:49:51.649Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5"}
2025-10-09T00:49:51.679Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "user": "root"}
2025-10-09T00:49:51.679Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "user": "operator"}
2025-10-09T00:49:51.690Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "user": "operator"}
2025-10-09T00:49:51.713Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "secret": "some-name-mysql-init", "user": "operator"}
2025-10-09T00:49:51.740Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "user": "operator"}
2025-10-09T00:49:51.740Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "user": "monitor"}
2025-10-09T00:49:51.751Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "user": "monitor"}
2025-10-09T00:49:51.769Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "secret": "some-name-mysql-init", "user": "monitor"}
2025-10-09T00:49:51.785Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "user": "monitor"}
2025-10-09T00:49:51.803Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "user": "monitor"}
2025-10-09T00:49:51.803Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "user": "xtrabackup"}
2025-10-09T00:49:51.815Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "user": "xtrabackup"}
2025-10-09T00:49:51.836Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2025-10-09T00:49:51.854Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "user": "xtrabackup"}
2025-10-09T00:49:51.854Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "user": "replication"}
"replication"} 2025-10-09T00:49:51.889Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "secret": "some-name-mysql-init", "user": "replication"} 2025-10-09T00:49:51.916Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "user": "replication"} 2025-10-09T00:49:51.916Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "user": "proxyadmin"} 2025-10-09T00:49:51.933Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "user": "proxyadmin"} 2025-10-09T00:49:51.955Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "user": "proxyadmin"} 2025-10-09T00:49:51.955Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "last-applied-secret": "94f2cce959fe6e95e7bfa640d3f8f5b41b6d9ab31a8d16ac8944dc4aa9a9bd6d"} 2025-10-09T00:49:51.955Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "last-applied-secret": "94f2cce959fe6e95e7bfa640d3f8f5b41b6d9ab31a8d16ac8944dc4aa9a9bd6d"} 2025-10-09T00:49:51.958Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:49:52.064Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:49:54.655Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "55d319e9-4b58-422b-8c90-d08bb5cd93e5", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"}
2025-10-09T00:51:28.642Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "1873fbc2-7d43-426c-95cc-066c600e81e6", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-17540 on 34.118.224.10:53: no such host"}
2025-10-09T00:51:29.343Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "cd962ba5-6011-4cd0-88cc-475019eeee0e", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-17540 on 34.118.224.10:53: no such host"}
2025-10-09T00:51:33.950Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "c3d387f4-f55a-433e-a4b2-3bc747d807c4", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-17540 on 34.118.224.10:53: no such host"}
2025-10-09T00:51:39.227Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "19682cd2-7c54-4cb3-86f0-8c03665006ab", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-17540 on 34.118.224.10:53: no such host"}
2025-10-09T00:51:44.440Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "01d4b2be-8f80-4f3b-86ef-ef65eb7392c6", "primary name": "some-name-pxc-0.some-name-pxc.users-17540.svc.cluster.local"}
2025-10-09T00:51:49.619Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "d778d3f2-f964-439a-a494-68ab5c3ca0fe", "primary name": "some-name-pxc-0.some-name-pxc.users-17540.svc.cluster.local"}
2025-10-09T00:51:54.819Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "380e6b49-d8cc-494e-83a0-575f91289a10", "primary name": "some-name-pxc-0.some-name-pxc.users-17540.svc.cluster.local"}
2025-10-09T00:51:59.960Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "99c7356e-b958-4e56-b0ce-23349a9bbfcd", "primary name": "some-name-pxc-0.some-name-pxc.users-17540.svc.cluster.local"}
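Each "Proxy pods will be restarted" / "PXC pods will be restarted" line above carries a last-applied-secret hash, and the same key shows up as a pod-template annotation in the object diff at the end of this log. Stamping a hash of the credentials secret onto the pod template is what makes the StatefulSet controller roll the pods after a password change. A sketch of that mechanism follows, assuming a plain sha256 over the secret's data; the exact hash input the operator uses is not shown in this log.

// Sketch: derive a deterministic hash from a Secret and stamp it on the
// StatefulSet pod template so that password changes trigger a rolling
// restart. The annotation key matches the log; the rest is illustrative.
package main

import (
	"crypto/sha256"
	"fmt"
	"sort"

	appsv1 "k8s.io/api/apps/v1"
	corev1 "k8s.io/api/core/v1"
)

// secretDataHash hashes the secret's keys and values in sorted key order,
// so the result is stable across map iteration order.
func secretDataHash(s *corev1.Secret) string {
	keys := make([]string, 0, len(s.Data))
	for k := range s.Data {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	h := sha256.New()
	for _, k := range keys {
		h.Write([]byte(k))
		h.Write(s.Data[k])
	}
	return fmt.Sprintf("%x", h.Sum(nil))
}

func stampLastAppliedSecret(sts *appsv1.StatefulSet, secret *corev1.Secret) {
	if sts.Spec.Template.Annotations == nil {
		sts.Spec.Template.Annotations = map[string]string{}
	}
	// Any change to the pod template, including this annotation, makes the
	// StatefulSet controller roll the pods.
	sts.Spec.Template.Annotations["last-applied-secret"] = secretDataHash(secret)
}

func main() {
	secret := &corev1.Secret{Data: map[string][]byte{"monitor": []byte("new-pass")}}
	sts := &appsv1.StatefulSet{}
	stampLastAppliedSecret(sts, secret)
	fmt.Println(sts.Spec.Template.Annotations["last-applied-secret"])
}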
2025-10-09T00:52:05.136Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "4d709342-8c57-4b2c-b475-f61da08a39bc", "primary name": "some-name-pxc-0.some-name-pxc.users-17540.svc.cluster.local"}
2025-10-09T00:52:10.277Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "84ba7515-8503-480a-928c-a4a65d236fd7", "primary name": "some-name-pxc-0.some-name-pxc.users-17540.svc.cluster.local"}
2025-10-09T00:52:17.922Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "25375785-fede-4e3b-b4c8-f46e90745034"}
2025-10-09T00:52:21.570Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "adfafa9c-57aa-4393-930a-c8657750ce42", "user": "operator"}
2025-10-09T00:52:21.582Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "adfafa9c-57aa-4393-930a-c8657750ce42", "user": "operator"}
2025-10-09T00:52:21.601Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "adfafa9c-57aa-4393-930a-c8657750ce42", "secret": "some-name-mysql-init", "user": "operator"}
2025-10-09T00:52:21.622Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "adfafa9c-57aa-4393-930a-c8657750ce42", "user": "operator"}
2025-10-09T00:52:21.622Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "adfafa9c-57aa-4393-930a-c8657750ce42", "last-applied-secret": "7816cbdef57d27f1dbfd32995e61fbdbb5130a60927faceba146bc8da3a19bb2"}
2025-10-09T00:52:21.625Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "adfafa9c-57aa-4393-930a-c8657750ce42", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-10-09T00:52:25.690Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "ed95a4f4-3706-4ed6-9aea-0ef5b2a95c76", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.'
(using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-17540.svc.' 
(using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"}
2025-10-09T00:52:55.686Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "cca0a132-f6cb-4628-8f8e-7907b4c6547c"}
2025-10-09T00:53:00.142Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "2e9922e5-de60-4242-bae5-5ab44d77945a"}
2025-10-09T00:53:05.618Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "b1edca94-8189-4438-b21a-037099d03eb5"}
2025-10-09T00:53:18.613Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "686ed4bd-d9d6-4462-b1ba-9ca24bb2c3e9"}
2025-10-09T00:53:24.729Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "eb562b36-3fa8-4dce-ba7e-29f497a84c58"}
2025-10-09T00:53:30.526Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "fbb99a73-b0ea-4c40-98ea-fd6df9a9b990"}
2025-10-09T00:53:35.853Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "6f32366b-c99a-4946-a5c4-ab12d386773c"}
2025-10-09T00:53:40.647Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "cd85f194-b17d-4901-885e-b1f496c992ff"}
2025-10-09T00:53:46.128Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "51f8e787-9307-421d-868c-edd2273ff61d"}
2025-10-09T00:53:51.447Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "9a51d424-82c8-4318-aa92-02ed1cf7ecba"}
2025-10-09T00:53:56.734Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "adbaea68-4c2e-4be9-80ec-f7f56ab79efd"}
2025-10-09T00:54:02.131Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "0eb0e5f4-84e1-45a0-924e-925d12b2de7c"}
2025-10-09T00:54:07.237Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "8eddb990-3483-4aeb-b4d6-40dfed946c21"}
2025-10-09T00:54:12.333Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "2656cd5d-78b4-4023-b2cc-bf2de13c9e35"}
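The recurring "exec syncusers: failed to execute command in pod" errors come from running ProxySQL's user-sync tool inside a proxysql pod while the credentials it was handed are already stale, hence the ERROR 1045 lines until the restarted pods pick up the new secret. A hedged sketch of how such an in-pod exec looks with client-go follows; the container name and command are assumptions based on the messages above, not confirmed by this log.

package main

import (
	"bytes"
	"context"
	"fmt"
	"log"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/kubernetes/scheme"
	"k8s.io/client-go/rest"
	"k8s.io/client-go/tools/remotecommand"
)

// execSyncUsers runs a user-sync command inside a pod and folds stdout and
// stderr into the returned error on failure, which is how a single log line
// ends up carrying every ERROR 1045 the tool printed.
func execSyncUsers(cfg *rest.Config, cs kubernetes.Interface, namespace, pod string) (string, error) {
	req := cs.CoreV1().RESTClient().Post().
		Resource("pods").Namespace(namespace).Name(pod).SubResource("exec").
		VersionedParams(&corev1.PodExecOptions{
			Container: "proxysql",                                // assumed container name
			Command:   []string{"proxysql-admin", "--syncusers"}, // assumed command
			Stdout:    true,
			Stderr:    true,
		}, scheme.ParameterCodec)

	exec, err := remotecommand.NewSPDYExecutor(cfg, "POST", req.URL())
	if err != nil {
		return "", err
	}
	var stdout, stderr bytes.Buffer
	if err := exec.StreamWithContext(context.Background(), remotecommand.StreamOptions{
		Stdout: &stdout,
		Stderr: &stderr,
	}); err != nil {
		// Mirror the log's "exec syncusers: ... / <stdout> / <stderr>" shape.
		return "", fmt.Errorf("exec syncusers: %w / %s / %s", err, stdout.String(), stderr.String())
	}
	return stdout.String(), nil
}

func main() {
	cfg, err := rest.InClusterConfig() // only works when run inside a cluster
	if err != nil {
		log.Fatal(err)
	}
	out, err := execSyncUsers(cfg, kubernetes.NewForConfigOrDie(cfg), "users-17540", "some-name-proxysql-0")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(out)
}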
"61f5f6c9-b4b4-4ddc-8db7-747b02ab7834"} 2025-10-09T00:54:17.524Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "46d4f9fb-a4ef-4000-b89e-1970bdeafe45", "user": "root"} 2025-10-09T00:54:17.539Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "46d4f9fb-a4ef-4000-b89e-1970bdeafe45", "user": "root"} 2025-10-09T00:54:17.558Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "46d4f9fb-a4ef-4000-b89e-1970bdeafe45", "secret": "some-name-mysql-init", "user": "root"} 2025-10-09T00:54:19.986Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "46d4f9fb-a4ef-4000-b89e-1970bdeafe45"} 2025-10-09T00:54:20.020Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "46d4f9fb-a4ef-4000-b89e-1970bdeafe45", "user": "root"} 2025-10-09T00:54:20.020Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "46d4f9fb-a4ef-4000-b89e-1970bdeafe45", "user": "monitor"} 2025-10-09T00:54:20.034Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "46d4f9fb-a4ef-4000-b89e-1970bdeafe45", "user": "monitor"} 2025-10-09T00:54:20.057Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "46d4f9fb-a4ef-4000-b89e-1970bdeafe45", "secret": "some-name-mysql-init", "user": "monitor"} 2025-10-09T00:54:20.074Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "46d4f9fb-a4ef-4000-b89e-1970bdeafe45", "user": "monitor"} 2025-10-09T00:54:20.095Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "46d4f9fb-a4ef-4000-b89e-1970bdeafe45", "user": "monitor"} 2025-10-09T00:54:20.095Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "46d4f9fb-a4ef-4000-b89e-1970bdeafe45", "user": "xtrabackup"} 2025-10-09T00:54:20.107Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "46d4f9fb-a4ef-4000-b89e-1970bdeafe45", "user": "xtrabackup"} 2025-10-09T00:54:20.126Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "46d4f9fb-a4ef-4000-b89e-1970bdeafe45", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-10-09T00:54:20.144Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "46d4f9fb-a4ef-4000-b89e-1970bdeafe45", "user": "xtrabackup"} 2025-10-09T00:54:20.144Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "46d4f9fb-a4ef-4000-b89e-1970bdeafe45", "user": "proxyadmin"} 2025-10-09T00:54:20.160Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "46d4f9fb-a4ef-4000-b89e-1970bdeafe45", "user": "proxyadmin"} 2025-10-09T00:54:20.186Z INFO 
Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "46d4f9fb-a4ef-4000-b89e-1970bdeafe45", "user": "proxyadmin"} 2025-10-09T00:54:20.186Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "46d4f9fb-a4ef-4000-b89e-1970bdeafe45", "last-applied-secret": "cff995f684976f7baf4d36083e4f8b12a47ae9a9c357225ecdb80635dce65e4b"} 2025-10-09T00:54:20.186Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "46d4f9fb-a4ef-4000-b89e-1970bdeafe45", "last-applied-secret": "cff995f684976f7baf4d36083e4f8b12a47ae9a9c357225ecdb80635dce65e4b"} 2025-10-09T00:54:20.192Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "46d4f9fb-a4ef-4000-b89e-1970bdeafe45", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:54:20.345Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "46d4f9fb-a4ef-4000-b89e-1970bdeafe45", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:54:21.894Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "46d4f9fb-a4ef-4000-b89e-1970bdeafe45", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-09T00:54:40.970Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "e5a25dc0-1a9c-49fd-b02d-659fb7e362de", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:54:41.051Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "e5a25dc0-1a9c-49fd-b02d-659fb7e362de", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-09T00:54:41.090Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "e5a25dc0-1a9c-49fd-b02d-659fb7e362de", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-09T00:54:41.165Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "e5a25dc0-1a9c-49fd-b02d-659fb7e362de", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-09T00:54:41.253Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "e5a25dc0-1a9c-49fd-b02d-659fb7e362de", "object": "some-name-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-09T00:54:41.964Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3c94cd4e-a134-47b2-9e9a-5cdaae21f57a", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-10-09T00:56:43.618Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": 
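Once the cluster is switched to HAProxy, the reconciler starts creating the some-name-haproxy StatefulSet, Services and PodDisruptionBudget. Note the back-to-back "Creating object some-name-haproxy" lines, which read like a create retried across overlapping reconciles. A sketch of an idempotent create with controller-runtime follows; treating AlreadyExists as success is an assumption about why the line can repeat harmlessly, not something this log proves.

package main

import (
	"context"
	"fmt"

	appsv1 "k8s.io/api/apps/v1"
	apierrors "k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"sigs.k8s.io/controller-runtime/pkg/client"
)

// ensureStatefulSet creates the object only when it is absent and treats
// AlreadyExists as success, so two reconciles racing on the same object can
// both log a create without either of them failing the loop.
func ensureStatefulSet(ctx context.Context, c client.Client, desired *appsv1.StatefulSet) error {
	current := &appsv1.StatefulSet{}
	err := c.Get(ctx, client.ObjectKeyFromObject(desired), current)
	if apierrors.IsNotFound(err) {
		fmt.Printf("Creating object %s\n", desired.Name)
		if err := c.Create(ctx, desired); err != nil && !apierrors.IsAlreadyExists(err) {
			return err
		}
		return nil
	}
	return err // nil when the object already exists and nothing needs creating
}

func main() {
	desired := &appsv1.StatefulSet{ObjectMeta: metav1.ObjectMeta{Name: "some-name-haproxy", Namespace: "users-17540"}}
	_ = desired
	_ = ensureStatefulSet // wiring a real client.Client needs a kubeconfig; omitted in this sketch
}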
"users-17540", "name": "some-name", "reconcileID": "11c65ab7-d33d-4ded-b9fe-96320489a7c2", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-17540 on 34.118.224.10:53: no such host"} 2025-10-09T00:56:43.976Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "8edaeffc-1245-463b-8ddb-332f2fdb1df3", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-17540 on 34.118.224.10:53: no such host"} 2025-10-09T00:57:26.487Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3e8b78f6-a1dc-4b80-bc1d-35473c7c3cb7", "user": "root"} 2025-10-09T00:57:26.505Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3e8b78f6-a1dc-4b80-bc1d-35473c7c3cb7", "user": "root"} 2025-10-09T00:57:26.525Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3e8b78f6-a1dc-4b80-bc1d-35473c7c3cb7", "secret": "some-name-mysql-init", "user": "root"} 2025-10-09T00:57:26.543Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3e8b78f6-a1dc-4b80-bc1d-35473c7c3cb7", "user": "root"} 2025-10-09T00:57:26.543Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3e8b78f6-a1dc-4b80-bc1d-35473c7c3cb7", "user": "operator"} 2025-10-09T00:57:26.552Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3e8b78f6-a1dc-4b80-bc1d-35473c7c3cb7", "user": "operator"} 2025-10-09T00:57:26.569Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3e8b78f6-a1dc-4b80-bc1d-35473c7c3cb7", "secret": "some-name-mysql-init", "user": "operator"} 2025-10-09T00:57:26.593Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3e8b78f6-a1dc-4b80-bc1d-35473c7c3cb7", "user": "operator"} 2025-10-09T00:57:26.594Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3e8b78f6-a1dc-4b80-bc1d-35473c7c3cb7", "user": "monitor"} 2025-10-09T00:57:26.605Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3e8b78f6-a1dc-4b80-bc1d-35473c7c3cb7", "user": "monitor"} 2025-10-09T00:57:26.625Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3e8b78f6-a1dc-4b80-bc1d-35473c7c3cb7", "secret": "some-name-mysql-init", "user": "monitor"} 2025-10-09T00:57:26.654Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3e8b78f6-a1dc-4b80-bc1d-35473c7c3cb7", "user": "monitor"} 2025-10-09T00:57:26.654Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3e8b78f6-a1dc-4b80-bc1d-35473c7c3cb7", "user": "xtrabackup"} 2025-10-09T00:57:26.663Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-17540", 
"name": "some-name", "reconcileID": "3e8b78f6-a1dc-4b80-bc1d-35473c7c3cb7", "user": "xtrabackup"} 2025-10-09T00:57:26.680Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3e8b78f6-a1dc-4b80-bc1d-35473c7c3cb7", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-10-09T00:57:26.703Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3e8b78f6-a1dc-4b80-bc1d-35473c7c3cb7", "user": "xtrabackup"} 2025-10-09T00:57:26.703Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3e8b78f6-a1dc-4b80-bc1d-35473c7c3cb7", "user": "replication"} 2025-10-09T00:57:26.714Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3e8b78f6-a1dc-4b80-bc1d-35473c7c3cb7", "user": "replication"} 2025-10-09T00:57:26.735Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3e8b78f6-a1dc-4b80-bc1d-35473c7c3cb7", "secret": "some-name-mysql-init", "user": "replication"} 2025-10-09T00:57:26.757Z INFO HAProxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3e8b78f6-a1dc-4b80-bc1d-35473c7c3cb7", "last-applied-secret": "3977eaf4779597539b358ec9ff000baa5c8fba9fc016a26c650c5f1a3bcf06b7"} 2025-10-09T00:57:26.757Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3e8b78f6-a1dc-4b80-bc1d-35473c7c3cb7", "user": "replication"} 2025-10-09T00:57:26.757Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3e8b78f6-a1dc-4b80-bc1d-35473c7c3cb7", "last-applied-secret": "3977eaf4779597539b358ec9ff000baa5c8fba9fc016a26c650c5f1a3bcf06b7"} 2025-10-09T00:57:26.763Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3e8b78f6-a1dc-4b80-bc1d-35473c7c3cb7", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:57:26.810Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "3e8b78f6-a1dc-4b80-bc1d-35473c7c3cb7", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-09T00:59:51.560Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "e0dc0e6a-e764-4c39-b91f-56b9336f913e", "user": "monitor"} 2025-10-09T00:59:51.572Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "e0dc0e6a-e764-4c39-b91f-56b9336f913e", "user": "monitor"} 2025-10-09T00:59:51.595Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "e0dc0e6a-e764-4c39-b91f-56b9336f913e", "secret": "some-name-mysql-init", "user": "monitor"} 2025-10-09T00:59:51.616Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "e0dc0e6a-e764-4c39-b91f-56b9336f913e", "user": "monitor"} 
2025-10-09T00:59:51.617Z INFO HAProxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "e0dc0e6a-e764-4c39-b91f-56b9336f913e", "last-applied-secret": "fe4f3a667d6567ba27838c08c6c989c775904440f7176cc59448d92c90fa6c36"}
2025-10-09T00:59:51.620Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-17540", "name": "some-name", "reconcileID": "e0dc0e6a-e764-4c39-b91f-56b9336f913e", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
[object diff truncated: the remainder of this entry is an alphabetically sorted dump of go-cmp diff fragments for the some-name-pxc, some-name-proxysql and some-name-haproxy StatefulSets (changed "last-applied-secret" and "percona.com/last-config-hash" annotations, cleared Generation/ObservedGeneration/CurrentRevision and replica status fields, and an apparently removed logcollector sidecar), interleaved with a controller-runtime stack trace and one go-sql-driver line: [mysql] 2025/10/09 00:59:20 packets.go:58 read tcp 10.56.209.4:54814->34.118.233.130:3306: i/o timeout. The original ordering of the fragments is not recoverable.]
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiODMyYmVkYmQ0NmQxMmZmZTliM2QzYjA1ZTRhMGEwMDkzZjkzZTY0OTNlZGM0ZjQ2M2IzMGExMWYyNjU2ZTVmZCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOTRmMmNjZTk1OWZlNmU5NWU3YmZhNjQwZDNmOGY1YjQxYjZkOWFiMzFhOGQxNmFjODk0NGRjNGFhOWE5YmQ2ZCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOTRmMmNjZTk1OWZlNmU5NWU3YmZhNjQwZDNmOGY1YjQxYjZkOWFiMzFhOGQxNmFjODk0NGRjNGFhOWE5YmQ2ZCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiY2ZmOTk1ZjY4NDk3NmY3YmFmNGQzNjA4M2U0ZjhiMTJhNDdhZTlhOWMzNTcyMjVlY2RiODA2MzVkY2U2NWU0YiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZjE3MzVjODA5NmEzYTVkNWIwMGEzYjY5Mzg5NGZlYjI5M2NkNjFiNzFjNmNiNDNmMzhlY2RiNjBmMTA3NmJiNSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6ImhhcHJveHkiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMzk3N2VhZjQ3Nzk1OTc1MzliMzU4ZWM5ZmYwMDBiYWE1YzhmYmE5ZmMwMTZhMjZjNjUwYzVmMWEzYmNmMDZiNyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6ImhhcHJveHkiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMzk3N2VhZjQ3Nzk1OTc1MzliMzU4ZWM5ZmYwMDBiYWE1YzhmYmE5ZmMwMTZhMjZjNjUwYzVmMWEzYmNmMDZiNyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6ImhhcHJveHkiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZmU0ZjNhNjY3ZDY1NjdiYTI3ODM4YzA4YzZjOTg5Yzc3NTkwNDQ0MGY3MTc2Y2M1OTQ0OGQ5MmM5MGZhNmMzNiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6ImhhcHJveHkiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSJ9fSwic3BlYyI6eyJ2b2x1bWVzIjpbeyJuYW1lIjoiaGFwcm94eS1jdXN0b20iLCJjb25maWdNYXAi"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMzk3N2VhZjQ3Nzk1OTc1MzliMzU4ZWM5ZmYwMDBiYWE1YzhmYmE5ZmMwMTZhMjZjNjUwYzVmMWEzYmNmMDZiNyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOTRmMmNjZTk1OWZlNmU5NWU3YmZhNjQwZDNmOGY1YjQxYjZkOWFiMzFhOGQxNmFjODk0NGRjNGFhOWE5YmQ2ZCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOTRmMmNjZTk1OWZlNmU5NWU3YmZhNjQwZDNmOGY1YjQxYjZkOWFiMzFhOGQxNmFjODk0NGRjNGFhOWE5YmQ2ZCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOWQ4ZWRlN2IxOWY5ZjZlMDkxYzJjZjA3ODViYmVkZmJkYzA1N2ZlZDI0NjI1OGFkYjk4ZWU4MGU1NTZjYjgyYSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOWQ4ZWRlN2IxOWY5ZjZlMDkxYzJjZjA3ODViYmVkZmJkYzA1N2ZlZDI0NjI1OGFkYjk4ZWU4MGU1NTZjYjgyYSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiY2ZmOTk1ZjY4NDk3NmY3YmFmNGQzNjA4M2U0ZjhiMTJhNDdhZTlhOWMzNTcyMjVlY2RiODA2MzVkY2U2NWU0YiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiY2ZmOTk1ZjY4NDk3NmY3YmFmNGQzNjA4M2U0ZjhiMTJhNDdhZTlhOWMzNTcyMjVlY2RiODA2MzVkY2U2NWU0YiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiY2ZmOTk1ZjY4NDk3NmY3YmFmNGQzNjA4M2U0ZjhiMTJhNDdhZTlhOWMzNTcyMjVlY2RiODA2MzVkY2U2NWU0YiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMjAyLWM1ZTJlNjgxIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiO
iJsb2dzIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1sb2djb2xsZWN0b3IiLCJlbnZGcm9tIjpbeyJzZWNyZXRSZWYiOnsibmFtZSI6InNvbWUtbmFtZS1sb2ctY29sbGVjdG9yIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IkxPR19EQVRBX0RJUiIsInZhbHVlIjoiL3Zhci9saWIvbXlzcWwifSx7Im5hbWUiOiJQT0RfTkFNRVNQQVNFIiwidmFsdWVGcm9tIjp7ImZpZWxkUmVmIjp7ImZpZWxkUGF0aCI6Im1ldGFkYXRhLm5hbWVzcGFjZSJ9fX0seyJuYW1lIjoiUE9EX05BTUUiLCJ2YWx1ZUZyb20iOnsiZmllbGRSZWYiOnsiZmllbGRQYXRoIjoibWV0YWRhdGEubmFtZSJ9fX1dLCJyZXNvdXJjZXMiOnt9LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJkYXRhZGlyIiwibW91bnRQYXRoIjoiL3Zhci9saWIvbXlzcWwifV0sImltYWdlUHVsbFBvbGljeSI6IkFsd2F5cyJ9LHsibmFtZSI6ImxvZ3JvdGF0ZSIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOm1haW4tbG9nY29sbGVjdG9yIiwiYXJncyI6WyJsb2dyb3RhdGUiXSwiZW52IjpbeyJuYW1lIjoiU0VSVklDRV9UWVBFIiwidmFsdWUiOiJteXNxbCJ9LHsibmFtZSI6Ik1PTklUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJtb25pdG9yIn19fV0sInJlc291cmNlcyI6e30sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImRhdGFkaXIiLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHhjIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1weGM1LjciLCJjb21tYW5kIjpbIi92YXIvbGliL215c3FsL3B4Yy1lbnRyeXBvaW50LnNoIl0sImFyZ3MiOlsibXlzcWxkIl0sInBvcnRzIjpbeyJuYW1lIjoibXlzcWwiLCJjb250YWluZXJQb3J0IjozMzA2fSx7Im5hbWUiOiJzc3QiLCJjb250YWluZXJQb3J0Ijo0NDQ0fSx7Im5hbWUiOiJ3cml0ZS1zZXQiLCJjb250YWluZXJQb3J0Ijo0NTY3fSx7Im5hbWUiOiJpc3QiLCJjb250YWluZXJQb3J0Ijo0NTY4fSx7Im5hbWUiOiJteXNxbC1hZG1pbiIsImNvbnRhaW5lclBvcnQiOjMzMDYyfSx7Im5h"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiY2ZmOTk1ZjY4NDk3NmY3YmFmNGQzNjA4M2U0ZjhiMTJhNDdhZTlhOWMzNTcyMjVlY2RiODA2MzVkY2U2NWU0YiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5
hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMjAyLWM1ZTJlNjgxIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXB4YzUuNyIsImNvbW1hbmQiOlsiL3Zhci9saWIvbXlzcWwvcHhjLWVudHJ5cG9pbnQuc2giXSwiYXJncyI6WyJteXNxbGQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InNzdCIsImNvbnRhaW5lclBvcnQiOjQ0NDR9LHsibmFtZSI6IndyaXRlLXNldCIsImNvbnRhaW5lclBvcnQiOjQ1Njd9LHsibmFtZSI6ImlzdCIsImNvbnRhaW5lclBvcnQiOjQ1Njh9LHsibmFtZSI6Im15c3FsLWFkbWluIiwiY29udGFpbmVyUG9ydCI6MzMwNjJ9LHsibmFtZSI6Im15c3FseCIsImNvbnRhaW5lclBvcnQiOjMzMDYwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXB4YyIsIm9wdGlvbmFsIjp0cnVlfX1dLCJlbnYiOlt7Im5hbWUiOiJQWENfU0VSVklDRSIsInZhbHVlIjoic29tZS1uYW1lLXB4Yy11bnJlYWR5In0seyJuYW1lIjoiTU9OSVRPUl9IT1NUIiwidmFsdWUiOiIlIn0seyJuYW1lIjoiTVlTUUxfUk9PVF9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6InJvb3QifX19LHsibmFtZSI6IlhUUkFCQUNLVVBfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJ4dHJhYmFja3VwIn19fSx7Im5hbWUiOiJNT05JVE9SX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoibW9uaXRvciJ9fX0seyJuYW1lIjoiQ0xVU1RFUl9IQVNIIiwidmFsdWUiOiIxNTI5MzMwIn0seyJuYW1lIjoiT1BFUkFUT1JfQURNSU5fUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiTElWRU5FU1NfQ0hFQ0tfVElNRU9VVCIsInZhbHVlIjoiNSJ9LHsibmFtZSI6IlJFQURJTkVTU19DSEVDS19USU1FT1VUIiwidmFsdWUiOiIxNSJ9LHsibmFtZSI6IkRFRkFVTFRfQVVUSEVOVElDQVRJT05fUExVR0lOIiwidmFsdWUiOiJjYWNoaW5nX3NoYTJfcGFzc3dvcmQifSx7Im5hbWUiOiJNWVNR"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNv"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZjE3MzVjODA5NmEzYTVkNWIwMGEzYjY5Mzg5NGZlYjI5M2NkNjFiNzFjNmNiNDNmMzhlY2RiNjBmMTA3NmJiNSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMw"..., "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc", + PeriodSeconds: 0, - PeriodSeconds: 10, + PersistentVolumeClaimRetentionPolicy: nil, - PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", + Phase: "", - Phase: "Pending", + PodManagementPolicy: "", - PodManagementPolicy: "OrderedReady", Ports: nil, Ports: []v1.ContainerPort{ PreemptionPolicy: nil, ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}}, + Protocol: "", - Protocol: "TCP", Quobyte: nil, ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, 
InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...},
+ ReadyReplicas: 0,
- ReadyReplicas: 2,
- ReadyReplicas: 3,
+ Replicas: 0,
Replicas: &2,
- Replicas: 2,
- Replicas: &2,
+ Replicas: &2,
Replicas: &3,
- Replicas: 3,
- Replicas: &3,
+ Replicas: &3,
ResizePolicy: nil,
ResourceFieldRef: nil,
Resources: {},
Resources: {Limits: {s"cpu": {i: {...}, s: "1", Format: "DecimalSI"}, s"memory": {i: {...}, s: "2G", Format: "DecimalSI"}}, Requests: {s"cpu": {i: {...}, s: "100m", Format: "DecimalSI"}, s"memory": {i: {...}, Format: "DecimalSI"}}},
+ ResourceVersion: "",
- ResourceVersion: "1759970316970959023",
- ResourceVersion: "1759970492532271019",
- ResourceVersion: "1759970642825135023",
- ResourceVersion: "1759970680641871023",
- ResourceVersion: "1759970697922383023",
- ResourceVersion: "1759970848211295019",
- ResourceVersion: "1759970895087839023",
- ResourceVersion: "1759970962386415023",
- ResourceVersion: "1759971007161567023",
- ResourceVersion: "1759971132833599019",
- ResourceVersion: "1759971171377759023",
- ResourceVersion: "1759971273595167019",
- ResourceVersion: "1759971348101567014",
- ResourceVersion: "1759971445329695019",
- ResourceVersion: "1759971521607519014",
+ RestartPolicy: "",
- RestartPolicy: "Always",
- RevisionHistoryLimit: &10,
+ RevisionHistoryLimit: nil,
+ SchedulerName: "",
+ SchedulerName: "",
- SchedulerName: "default-scheduler",
- SchedulerName: "default-scheduler",
SecretName: "internal-some-name",
SecretName: "some-name-env-vars-haproxy",
SecretName: "some-name-mysql-init",
SecretName: "some-name-ssl",
SecretName: "some-name-ssl-internal",
SecretName: "some-name-vault",
Secret: &v1.SecretVolumeSource{
SecurityContext: nil,
+ SecurityContext: nil,
- SecurityContext: s"&PodSecurityContext{SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,SupplementalGroups:[],FSGroup:nil,RunAsGroup:nil,Sysctls:[]Sysctl{},WindowsOptions:nil,FSGroupChangePolicy:nil,SeccompProfile:nil,AppArmorProfile:nil,SupplementalGroupsPolicy:nil,SELinux"...,
Selector: &{MatchLabels: {"app.kubernetes.io/component": "haproxy", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},
Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},
Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},
SelfLink: "",
ServiceAccountName: "default",
ServiceName: "some-name-haproxy",
ServiceName: "some-name-proxysql-unready",
ServiceName: "some-name-pxc",
SetHostnameAsFQDN: nil,
ShareProcessNamespace: nil,
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1
Spec: v1.PersistentVolumeClaimSpec{
Spec: v1.PodSpec{
Spec: v1.StatefulSetSpec{
StartupProbe: nil,
Status: v1.PersistentVolumeClaimStatus{
Status: v1.StatefulSetStatus{
StorageClassName: nil,
Subdomain: "",
Subdomain: "",
- Subresource: "status",
SuccessThreshold: 1,
Template: v1.PodTemplateSpec{
TerminationGracePeriodSeconds: &30,
TerminationGracePeriodSeconds: &600,
TerminationGracePeriodSeconds: nil,
+ TerminationMessagePath: "",
- TerminationMessagePath: "/dev/termination-log",
+ TerminationMessagePolicy: "",
- TerminationMessagePolicy: "File",
TimeoutSeconds: 5,
- Time: s"2025-10-09 00:37:52 +0000 UTC",
- Time: s"2025-10-09 00:38:36 +0000 UTC",
- Time: s"2025-10-09 00:41:32 +0000 UTC",
- Time: s"2025-10-09 00:43:44 +0000 UTC",
- Time: s"2025-10-09 00:44:02 +0000 UTC",
- Time: s"2025-10-09 00:44:12 +0000 UTC",
- Time: s"2025-10-09 00:44:40 +0000 UTC",
- Time: s"2025-10-09 00:44:56 +0000 UTC",
- Time: s"2025-10-09 00:44:57 +0000 UTC",
- Time: s"2025-10-09 00:45:03 +0000 UTC",
- Time: s"2025-10-09 00:47:28 +0000 UTC",
- Time: s"2025-10-09 00:47:39 +0000 UTC",
- Time: s"2025-10-09 00:48:15 +0000 UTC",
- Time: s"2025-10-09 00:49:05 +0000 UTC",
- Time: s"2025-10-09 00:49:22 +0000 UTC",
- Time: s"2025-10-09 00:49:51 +0000 UTC",
- Time: s"2025-10-09 00:49:52 +0000 UTC",
- Time: s"2025-10-09 00:50:07 +0000 UTC",
- Time: s"2025-10-09 00:52:12 +0000 UTC",
- Time: s"2025-10-09 00:52:21 +0000 UTC",
- Time: s"2025-10-09 00:52:51 +0000 UTC",
- Time: s"2025-10-09 00:54:20 +0000 UTC",
- Time: s"2025-10-09 00:54:33 +0000 UTC",
- Time: s"2025-10-09 00:54:40 +0000 UTC",
- Time: s"2025-10-09 00:54:41 +0000 UTC",
- Time: s"2025-10-09 00:55:48 +0000 UTC",
- Time: s"2025-10-09 00:57:25 +0000 UTC",
- Time: s"2025-10-09 00:57:26 +0000 UTC",
- Time: s"2025-10-09 00:58:41 +0000 UTC",
Tolerations: {{Key: "node.alpha.kubernetes.io/unreachable", Operator: "Exists", Effect: "NoExecute", TolerationSeconds: &6000}},
Tolerations: nil,
- TopologySpreadConstraints: nil,
+ TopologySpreadConstraints: []v1.TopologySpreadConstraint{},
TypeMeta: {},
TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"},
+ UID: "",
- UID: "2fe0b767-32e9-466b-abb4-276c6f33c51d",
- UID: "4ec9bdf8-dc25-4556-9ab9-cef956d56d88",
- UID: "6c294e49-c001-44c3-a6e0-2a86ddd30548",
+ UpdatedReplicas: 0,
- UpdatedReplicas: 1,
- UpdatedReplicas: 2,
- UpdatedReplicas: 3,
+ UpdateRevision: "",
- UpdateRevision: "some-name-haproxy-6669cdc549",
- UpdateRevision: "some-name-haproxy-f46ffb66d",
- UpdateRevision: "some-name-proxysql-565f448466",
- UpdateRevision: "some-name-proxysql-58bc688659",
- UpdateRevision: "some-name-proxysql-5c769c59bc",
- UpdateRevision: "some-name-proxysql-5f4f9fd48d",
- UpdateRevision: "some-name-proxysql-6784dfb6d",
- UpdateRevision: "some-name-proxysql-99b54bd44",
- UpdateRevision: "some-name-pxc-54fd568749",
- UpdateRevision: "some-name-pxc-57d9c49876",
- UpdateRevision: "some-name-pxc-5dd59b947f",
- UpdateRevision: "some-name-pxc-6d9945b9c6",
- UpdateRevision: "some-name-pxc-854dbb476f",
UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}},
&v1.StatefulSet{
Value: "",
+ Value: "caching_sha2_password",
ValueFrom: nil,
ValueFrom: &v1.EnvVarSource{
- Value: "mysql_native_password",
VolumeAttributesClassName: nil,
VolumeClaimTemplates: nil,
VolumeClaimTemplates: []v1.PersistentVolumeClaim{
VolumeDevices: nil,
- VolumeMode: &"Filesystem",
+ VolumeMode: nil,
VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...},
- VolumeMounts: []v1.VolumeMount{{Name: "datadir", MountPath: "/var/lib/mysql"}},
VolumeName: "",
VolumeSource: v1.VolumeSource{
Volumes: []v1.Volume{
VsphereVolume: nil,
WorkingDir: "",
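The -/+ block above is an object diff the operator prints when the live StatefulSet has drifted from the one it just generated; the stack-trace lines place it in resyncPXCUsersWithProxySQL (pkg/controller/pxc/controller.go:869) running under controller-runtime v0.22.1. The layout matches what github.com/google/go-cmp produces. Below is a minimal sketch of generating such a diff; the field values are taken from the log above, but this is an illustrative reconstruction, not the operator's actual code:

package main

import (
	"fmt"

	"github.com/google/go-cmp/cmp"
	appsv1 "k8s.io/api/apps/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func int32Ptr(i int32) *int32 { return &i }

func main() {
	// "current" mimics the StatefulSet read back from the API server:
	// server-populated fields such as Generation are set.
	current := &appsv1.StatefulSet{
		ObjectMeta: metav1.ObjectMeta{
			Name:       "some-name-proxysql",
			Namespace:  "users-17540",
			Generation: 8,
			Annotations: map[string]string{
				"last-applied-secret": "f1735c8096a3a5d5b00a3b693894feb293cd61b71c6cb43f38ecdb60f1076bb5",
			},
		},
		Spec: appsv1.StatefulSetSpec{Replicas: int32Ptr(2)},
	}

	// "desired" mimics the freshly generated object: Generation is still
	// zero and the secret-hash annotation has moved on after a password change.
	desired := current.DeepCopy()
	desired.Generation = 0
	desired.Annotations["last-applied-secret"] = "9d8ede7b19f9f6e091c2cf0785bbedfbdc057fed246258adb98ee80e556cb82a"
	desired.Spec.Replicas = int32Ptr(3)

	// cmp.Diff renders values only in the first argument with "-" and values
	// only in the second with "+", the same convention as the dump above.
	fmt.Println(cmp.Diff(current, desired))
}

Read this way, the zeroed Generation, ObservedGeneration, ResourceVersion, UID, UpdateRevision and Status fields on the "+" side appear to be nothing more than the freshly built object never having been persisted; the substantive drift is in the last-applied-secret hashes, the dropped logcollector containers, and the replica counts.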
"{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE + kubectl patch pxc -n users-17540 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.XcZjRrn4Ur ++ mktemp + local LAST_ERR=/tmp/tmp.nmMHX88Dmd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XcZjRrn4Ur perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-17540 namespace + cat /tmp/tmp.nmMHX88Dmd + rm /tmp/tmp.XcZjRrn4Ur /tmp/tmp.nmMHX88Dmd + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.4zMzoHl3LG ++ mktemp + local LAST_ERR=/tmp/tmp.R5NLwxrEUZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.4zMzoHl3LG No resources found + cat /tmp/tmp.R5NLwxrEUZ + rm /tmp/tmp.4zMzoHl3LG /tmp/tmp.R5NLwxrEUZ + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.SZIgsYf9ic ++ mktemp + local LAST_ERR=/tmp/tmp.IoFITn86t6 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SZIgsYf9ic No resources found + cat /tmp/tmp.IoFITn86t6 + rm /tmp/tmp.SZIgsYf9ic /tmp/tmp.IoFITn86t6 + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.lljB1eyyWW ++ mktemp + local LAST_ERR=/tmp/tmp.500XOHbfxl + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lljB1eyyWW validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.500XOHbfxl + rm /tmp/tmp.lljB1eyyWW /tmp/tmp.500XOHbfxl + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-17540 + rm -rf /tmp/tmp.o30MpYaJxV + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.xiDbsjETCL + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.vY2pPXKtE0 ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.Wxe6Zwzxbv + local exit_status=0 + local LAST_ERR=/tmp/tmp.9dzr22joK4 + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-17540 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator