Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/logs/users-8-0.log Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 + create_infra users-24200 + local ns=users-24200 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-22715 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.eMybnHD2a5 ++ mktemp + local LAST_ERR=/tmp/tmp.oXOPYISXWq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.eMybnHD2a5 perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-22715 namespace + cat /tmp/tmp.oXOPYISXWq + rm /tmp/tmp.eMybnHD2a5 /tmp/tmp.oXOPYISXWq + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.tRmcB92kiE ++ mktemp + local LAST_ERR=/tmp/tmp.P8AnWtg9PH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.tRmcB92kiE No resources found + cat /tmp/tmp.P8AnWtg9PH + rm /tmp/tmp.tRmcB92kiE /tmp/tmp.P8AnWtg9PH + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.xo12UsFJq6 ++ mktemp + local LAST_ERR=/tmp/tmp.FUG6czhAVh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xo12UsFJq6 No resources found + cat /tmp/tmp.FUG6czhAVh + rm /tmp/tmp.xo12UsFJq6 /tmp/tmp.FUG6czhAVh + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ grep chaos-mesh ++ kubectl get MutatingWebhookConfiguration ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrole error: resource(s) were 
provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp ++ mktemp + xargs kubectl delete ns + local LAST_OUT=/tmp/tmp.KnfM3TWap7 + local LAST_OUT=/tmp/tmp.1NDI381Fhz ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.KabAJQ0Q2k + local exit_status=0 + local LAST_ERR=/tmp/tmp.EPllZgi5ch + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1NDI381Fhz + cat /tmp/tmp.EPllZgi5ch + rm /tmp/tmp.1NDI381Fhz /tmp/tmp.EPllZgi5ch + return 0 namespace "users-22715" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.KnfM3TWap7 namespace "pxc-operator" deleted + cat /tmp/tmp.KabAJQ0Q2k + rm /tmp/tmp.KnfM3TWap7 /tmp/tmp.KabAJQ0Q2k + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.SglRJaH8X1 ++ mktemp + local LAST_ERR=/tmp/tmp.WUFgXoC1sA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SglRJaH8X1 namespace/pxc-operator created + cat /tmp/tmp.WUFgXoC1sA + rm /tmp/tmp.SglRJaH8X1 /tmp/tmp.WUFgXoC1sA + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.pKqFUQZ8sQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.EMtxgHxMyD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pKqFUQZ8sQ ++ cat /tmp/tmp.EMtxgHxMyD ++ rm /tmp/tmp.pKqFUQZ8sQ /tmp/tmp.EMtxgHxMyD ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2154-7a623b10-7-cluster1 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.pRk6O4cT8R ++ mktemp + local LAST_ERR=/tmp/tmp.3n4CwSLmS3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2154-7a623b10-7-cluster1 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.pRk6O4cT8R Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2154-7a623b10-7-cluster1" modified. 
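Two things in the cleanup trace above are easy to misread. First, the repeated "error: resource(s) were provided, but no name was specified" lines come from the chaos-mesh cleanup: each "timeout 30 kubectl delete <kind>" is fed a name list produced by "kubectl get <kind> | grep chaos-mesh | awk '{print $1}'", and since no chaos-mesh objects exist the list is empty; each failure is deliberately discarded (the "+ :" that follows). Second, nearly every kubectl invocation in this log runs through a kubectl_bin retry wrapper, whose expansion is the repeating mktemp / "seq 0 2" / "set +e" pattern. Below is a minimal sketch of what that wrapper appears to do, reconstructed only from this trace; the real helper in the e2e test library may differ in details such as the delay between attempts.

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do                        # up to three attempts
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"    # buffer stdout/stderr instead of printing immediately
        exit_status=$?
        set -e
        [ "$exit_status" = 0 ] && break
        sleep 0                                    # the trace shows a zero-second pause between retries
    done
    cat "$LAST_OUT"                                # print buffered output once, after the last attempt
    cat "$LAST_ERR" >&2
    rm -f "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}

This is why a failing call appears as the same command repeated three times: further down, "kubectl delete namespace users-24200" fails on all three attempts, the buffered "NotFound" error is printed once at the end, and the wrapper returns 1, which the caller discards (the "+ :" that follows) before waiting for the namespace to be gone.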
+ cat /tmp/tmp.3n4CwSLmS3 + rm /tmp/tmp.pRk6O4cT8R /tmp/tmp.3n4CwSLmS3 + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.y2POlKWd9D ++ mktemp + local LAST_ERR=/tmp/tmp.3WAkyfTx02 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.y2POlKWd9D customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.3WAkyfTx02 + rm /tmp/tmp.y2POlKWd9D /tmp/tmp.3WAkyfTx02 + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/deploy/cw-rbac.yaml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.s8I0Zs8Tgm ++ mktemp + local LAST_ERR=/tmp/tmp.xX5X9eSPsC + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + sed -e 's^namespace: .*^namespace: pxc-operator^' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.s8I0Zs8Tgm clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.xX5X9eSPsC + rm /tmp/tmp.s8I0Zs8Tgm /tmp/tmp.xX5X9eSPsC + return 0 + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2154-7a623b10^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + kubectl_bin apply -f - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/deploy/cw-operator.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.yOlWvRirpp ++ mktemp + local LAST_ERR=/tmp/tmp.l3pIW8tWod + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.yOlWvRirpp deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.l3pIW8tWod + rm /tmp/tmp.yOlWvRirpp /tmp/tmp.l3pIW8tWod + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.kUfiUXBVbW ++ mktemp + local LAST_ERR=/tmp/tmp.y8rt0UXocm + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.kUfiUXBVbW pod/percona-xtradb-cluster-operator-6cf85965f9-74zrm condition met + cat /tmp/tmp.y8rt0UXocm + rm /tmp/tmp.kUfiUXBVbW /tmp/tmp.y8rt0UXocm + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.r4CS3oj8B3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.qzkT8kXvkZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.r4CS3oj8B3 ++ cat /tmp/tmp.qzkT8kXvkZ ++ rm /tmp/tmp.r4CS3oj8B3 /tmp/tmp.qzkT8kXvkZ ++ return 0 + wait_pod percona-xtradb-cluster-operator-6cf85965f9-74zrm 480 pxc-operator + local pod=percona-xtradb-cluster-operator-6cf85965f9-74zrm + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-6cf85965f9-74zrm ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-6cf85965f9-74zrm condition met waiting for pod/percona-xtradb-cluster-operator-6cf85965f9-74zrm to become Ready.Ok + sleep 3 + create_namespace users-24200 + local namespace=users-24200 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + '[' -n '' 
']' + desc 'cleaned up old namespaces users-24200' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-24200 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-24200 ++ mktemp + xargs kubectl delete ns + local LAST_OUT=/tmp/tmp.LuAwbLdqlT + grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' ++ mktemp + local LAST_ERR=/tmp/tmp.pej2FbNMrE + local exit_status=0 + awk '{print$1}' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-24200 ++ mktemp + local LAST_OUT=/tmp/tmp.qQrxBD0jnR ++ mktemp + local LAST_ERR=/tmp/tmp.SncZavPaWm + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qQrxBD0jnR + cat /tmp/tmp.SncZavPaWm + rm /tmp/tmp.qQrxBD0jnR /tmp/tmp.SncZavPaWm + return 0 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-24200 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-24200 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.LuAwbLdqlT + cat /tmp/tmp.pej2FbNMrE Error from server (NotFound): namespaces "users-24200" not found + rm /tmp/tmp.LuAwbLdqlT /tmp/tmp.pej2FbNMrE + return 1 + : + wait_for_delete namespace/users-24200 + local res=namespace/users-24200 + echo -n 'waiting for namespace/users-24200 to be deleted' waiting for namespace/users-24200 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "users-24200" not found + desc 'create namespace users-24200' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-24200 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-24200 ++ mktemp + local LAST_OUT=/tmp/tmp.jm9fheqB3w ++ mktemp + local LAST_ERR=/tmp/tmp.pZn99prowi + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-24200 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jm9fheqB3w namespace/users-24200 created + cat /tmp/tmp.pZn99prowi + rm /tmp/tmp.jm9fheqB3w /tmp/tmp.pZn99prowi + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.1gOWFScwNJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.Us7hpqthWs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1gOWFScwNJ ++ cat /tmp/tmp.Us7hpqthWs ++ rm /tmp/tmp.1gOWFScwNJ /tmp/tmp.Us7hpqthWs ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2154-7a623b10-7-cluster1 --namespace=users-24200 ++ mktemp + local LAST_OUT=/tmp/tmp.wl22xKmSWa ++ mktemp + local LAST_ERR=/tmp/tmp.0Fyjp2kOcq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2154-7a623b10-7-cluster1 --namespace=users-24200 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.wl22xKmSWa Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2154-7a623b10-7-cluster1" modified. 
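The operator deployment applied earlier (the cw-operator.yaml step inside deploy_operator) is customized on the fly. Because every element of a pipeline is traced as it starts, the stages appear interleaved in the log; reassembled in a plausible logical order they amount to roughly the following sketch, based only on the traced sed/yq expressions (the actual stage order cannot be read from an interleaved trace):

cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/deploy/cw-operator.yaml \
    | sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2154-7a623b10^' \
    | sed -e 's^failureThreshold: .*^failureThreshold: 10^' \
    | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[]
        | select(.name == "percona-xtradb-cluster-operator").env[]
        | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - \
    | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[]
        | select(.name == "percona-xtradb-cluster-operator").env[]
        | select(.name == "DISABLE_TELEMETRY").value) = "true"' - \
    | kubectl_bin apply -f -

The PR build image (PR-2154-7a623b10), the relaxed failureThreshold, verbose logging, and disabled telemetry are what distinguish this run from a stock deploy/cw-operator.yaml apply; the CRDs and cw-rbac.yaml were applied just before with server-side apply and a namespace substitution, as shown in the trace.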
+ cat /tmp/tmp.0Fyjp2kOcq + rm /tmp/tmp.wl22xKmSWa /tmp/tmp.0Fyjp2kOcq + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.3cFUgRCBWM ++ mktemp + local LAST_ERR=/tmp/tmp.VdQGtXQG18 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3cFUgRCBWM secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.VdQGtXQG18 + rm /tmp/tmp.3cFUgRCBWM /tmp/tmp.VdQGtXQG18 + return 0 + desc 'create PXC cluster with 1-password secret' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster with 1-password secret ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/some-name.yml '' '' /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/secrets_one_pass.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/secrets_one_pass.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/secrets_one_pass.yml ++ mktemp + local LAST_OUT=/tmp/tmp.BDDVprsdlN ++ mktemp + local LAST_ERR=/tmp/tmp.HbRarvAqFi + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/secrets_one_pass.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BDDVprsdlN secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.HbRarvAqFi + rm /tmp/tmp.BDDVprsdlN /tmp/tmp.HbRarvAqFi + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/client.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/client.yml ++ mktemp + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/client.yml + local LAST_OUT=/tmp/tmp.CyXiXMvXDR + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2154-7a623b10#' ++ mktemp + /usr/bin/sed -e 
's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_ERR=/tmp/tmp.Wn4Qqo1mji + local exit_status=0 + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-24200~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.CyXiXMvXDR deployment.apps/pxc-client created + cat /tmp/tmp.Wn4Qqo1mji + rm /tmp/tmp.CyXiXMvXDR /tmp/tmp.Wn4Qqo1mji + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.SyM5AeKmx9 ++ mktemp + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-24200~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + local LAST_ERR=/tmp/tmp.bYotY3Xzqb + local exit_status=0 + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' ++ seq 0 2 + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2154-7a623b10#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SyM5AeKmx9 perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.bYotY3Xzqb + rm /tmp/tmp.SyM5AeKmx9 /tmp/tmp.bYotY3Xzqb + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.RqpRjuoprz ++++ mktemp +++ local 
LAST_ERR=/tmp/tmp.Fey1LWrpdJ +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.RqpRjuoprz +++ cat /tmp/tmp.Fey1LWrpdJ +++ rm /tmp/tmp.RqpRjuoprz /tmp/tmp.Fey1LWrpdJ +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.uW7CsgLWVI ++++ mktemp +++ local LAST_ERR=/tmp/tmp.Gh17vbSx4r +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.uW7CsgLWVI +++ cat /tmp/tmp.Gh17vbSx4r +++ rm /tmp/tmp.uW7CsgLWVI /tmp/tmp.Gh17vbSx4r +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-24200 ++ mktemp + local LAST_OUT=/tmp/tmp.JfWESgJRjl ++ mktemp + local LAST_ERR=/tmp/tmp.iBYEoZuuul + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-24200 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-24200 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-24200 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.JfWESgJRjl + cat /tmp/tmp.iBYEoZuuul error: no matching resources found + rm /tmp/tmp.JfWESgJRjl /tmp/tmp.iBYEoZuuul + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' 
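Two notes on the section above. The three failed "kubectl wait ... app.kubernetes.io/instance=monitoring" attempts are tolerated: no monitoring pods exist in this test, so "error: no matching resources found" is expected, and the return 1 is swallowed by the caller (the "+ true" right after). The proxy selection that follows (get_proxy some-name) is fully visible in the trace: .spec.haproxy.enabled is empty, .spec.proxysql.enabled is true, so the test settles on some-name-proxysql. A sketch of that selection logic as it can be read from the trace; the haproxy branch and the final fallback are assumptions, since neither was taken in this run:

get_proxy() {
    local target_cluster=$1
    # prefer HAProxy if the cluster spec enables it (not the case in this run)
    if [[ $(kubectl_bin get pxc "$target_cluster" -o 'jsonpath={.spec.haproxy.enabled}') == "true" ]]; then
        echo "$target_cluster-haproxy"
        return
    fi
    if [[ $(kubectl_bin get pxc "$target_cluster" -o 'jsonpath={.spec.proxysql.enabled}') == "true" ]]; then
        echo "$target_cluster-proxysql"
        return
    fi
    echo "$target_cluster-pxc"    # assumed fallback when neither proxy is enabled
}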
++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.s2y4CDzmPs +++ mktemp ++ local LAST_ERR=/tmp/tmp.yZu391tNh6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.s2y4CDzmPs ++ cat /tmp/tmp.yZu391tNh6 ++ rm /tmp/tmp.s2y4CDzmPs /tmp/tmp.yZu391tNh6 ++ return 0 + local 'root_pass=,HBy~nhAJ.h!*)RyD+' + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\'',HBy~nhAJ.h!*)RyD+'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -p'\'',HBy~nhAJ.h!*)RyD+'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bqvYRNMSv2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.aZZCLfFRi8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bqvYRNMSv2 ++ cat /tmp/tmp.aZZCLfFRi8 ++ rm /tmp/tmp.bqvYRNMSv2 /tmp/tmp.aZZCLfFRi8 ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\'',HBy~nhAJ.h!*)RyD+'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -p'\'',HBy~nhAJ.h!*)RyD+'\'' 
-P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZBHxq20ypd +++ mktemp ++ local LAST_ERR=/tmp/tmp.cbAKDa8xVY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZBHxq20ypd ++ cat /tmp/tmp.cbAKDa8xVY ++ rm /tmp/tmp.ZBHxq20ypd /tmp/tmp.cbAKDa8xVY ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\'',HBy~nhAJ.h!*)RyD+'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\'',HBy~nhAJ.h!*)RyD+'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\'',HBy~nhAJ.h!*)RyD+'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\'',HBy~nhAJ.h!*)RyD+'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ULH3HQo3t8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.uBaMG3ydRh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ULH3HQo3t8 ++ cat /tmp/tmp.uBaMG3ydRh ++ rm /tmp/tmp.ULH3HQo3t8 /tmp/tmp.uBaMG3ydRh ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.JFgyIkaORV/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1.sql /tmp/tmp.JFgyIkaORV/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\'',HBy~nhAJ.h!*)RyD+'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\'',HBy~nhAJ.h!*)RyD+'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\'',HBy~nhAJ.h!*)RyD+'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\'',HBy~nhAJ.h!*)RyD+'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Of7X7xnDws +++ mktemp ++ local LAST_ERR=/tmp/tmp.V7Y0n4ERii ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Of7X7xnDws ++ cat /tmp/tmp.V7Y0n4ERii ++ rm /tmp/tmp.Of7X7xnDws /tmp/tmp.V7Y0n4ERii ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.JFgyIkaORV/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1.sql /tmp/tmp.JFgyIkaORV/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\'',HBy~nhAJ.h!*)RyD+'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\'',HBy~nhAJ.h!*)RyD+'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\'',HBy~nhAJ.h!*)RyD+'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\'',HBy~nhAJ.h!*)RyD+'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dwHdUxWMhV +++ mktemp ++ local LAST_ERR=/tmp/tmp.lS6SbP5zFF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dwHdUxWMhV ++ cat /tmp/tmp.lS6SbP5zFF ++ rm /tmp/tmp.dwHdUxWMhV /tmp/tmp.lS6SbP5zFF ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.JFgyIkaORV/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1.sql /tmp/tmp.JFgyIkaORV/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ grep -E -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1jmZrYmwq6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.rpl1hFhgmz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1jmZrYmwq6 ++ cat /tmp/tmp.rpl1hFhgmz Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.1jmZrYmwq6 /tmp/tmp.rpl1hFhgmz ++ return 0 + '[' '' ']' + desc 'test missing passwords were created and present in internal secrets' + set +o xtrace ----------------------------------------------------------------------------------- test missing passwords were created and present in internal secrets ----------------------------------------------------------------------------------- + empty_pwds=() + wrong_pwds=() + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking root' Checking root ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.wTOTJzG8s7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.WH18xyWUbX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wTOTJzG8s7 ++ cat /tmp/tmp.WH18xyWUbX ++ rm /tmp/tmp.wTOTJzG8s7 /tmp/tmp.WH18xyWUbX ++ return 0 + secret_pass=',HBy~nhAJ.h!*)RyD+' ++ getSecretData internal-some-name root ++ local secretName=internal-some-name ++ local dataKey=root ++ kubectl_bin get secrets/internal-some-name '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.JIeT2hGjzz +++ mktemp ++ local LAST_ERR=/tmp/tmp.p0VkIwXHWH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JIeT2hGjzz ++ cat /tmp/tmp.p0VkIwXHWH ++ rm /tmp/tmp.JIeT2hGjzz /tmp/tmp.p0VkIwXHWH ++ return 0 + int_secret_pass=',HBy~nhAJ.h!*)RyD+' + [[ -z ,HBy~nhAJ.h!*)RyD+ ]] + [[ ,HBy~nhAJ.h!*)RyD+ != \,\H\B\y\~\n\h\A\J\.\h\!\*\)\R\y\D\+ ]] + [[ root != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ root ]] + [[ '' =~ root ]] + echo 'Running compare for root' Running compare for root + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\'',HBy~nhAJ.h!*)RyD+'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\'',HBy~nhAJ.h!*)RyD+'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql ]] + 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\'',HBy~nhAJ.h!*)RyD+'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\'',HBy~nhAJ.h!*)RyD+'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nwCKWGtidi +++ mktemp ++ local LAST_ERR=/tmp/tmp.e8Rv2SDO1G ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nwCKWGtidi ++ cat /tmp/tmp.e8Rv2SDO1G ++ rm /tmp/tmp.nwCKWGtidi /tmp/tmp.e8Rv2SDO1G ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.JFgyIkaORV/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.JFgyIkaORV/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking xtrabackup' Checking xtrabackup ++ getSecretData my-cluster-secrets xtrabackup ++ local secretName=my-cluster-secrets ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.klesZ6xL1V +++ mktemp ++ local LAST_ERR=/tmp/tmp.mlwF0Q0gkV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.klesZ6xL1V ++ cat /tmp/tmp.mlwF0Q0gkV ++ rm /tmp/tmp.klesZ6xL1V /tmp/tmp.mlwF0Q0gkV ++ return 0 + secret_pass='L=(&G3d1b$I>fiGab' ++ getSecretData internal-some-name xtrabackup ++ local secretName=internal-some-name ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.ho2kFbleN6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.kPoOyXbJjG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ho2kFbleN6 ++ cat /tmp/tmp.kPoOyXbJjG ++ rm /tmp/tmp.ho2kFbleN6 /tmp/tmp.kPoOyXbJjG ++ return 0 + int_secret_pass='L=(&G3d1b$I>fiGab' + [[ -z L=(&G3d1b$I>fiGab ]] + [[ L=(&G3d1b$I>fiGab != \L\=\(\&\G\3\d\1\b\$\I\>\f\i\G\a\b ]] + [[ xtrabackup != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ xtrabackup ]] + [[ '' =~ xtrabackup ]] + echo 'Running compare for xtrabackup' Running compare for xtrabackup + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''L=(&G3d1b$I>fiGab'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''L=(&G3d1b$I>fiGab'\''' + local postfix= + local 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''L=(&G3d1b$I>fiGab'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''L=(&G3d1b$I>fiGab'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mYRDkDCvf7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.FMItgWodV0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mYRDkDCvf7 ++ cat /tmp/tmp.FMItgWodV0 ++ rm /tmp/tmp.mYRDkDCvf7 /tmp/tmp.FMItgWodV0 ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.JFgyIkaORV/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.JFgyIkaORV/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking monitor' Checking monitor ++ getSecretData my-cluster-secrets monitor ++ local secretName=my-cluster-secrets ++ local dataKey=monitor ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.mgPOUVcf3S +++ mktemp ++ local LAST_ERR=/tmp/tmp.tTRKyTudFF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mgPOUVcf3S ++ cat /tmp/tmp.tTRKyTudFF ++ rm /tmp/tmp.mgPOUVcf3S /tmp/tmp.tTRKyTudFF ++ return 0 + secret_pass=monitor_password ++ getSecretData internal-some-name monitor ++ local secretName=internal-some-name ++ local dataKey=monitor ++ kubectl_bin get secrets/internal-some-name '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.J8cKrJr5NH +++ mktemp ++ local LAST_ERR=/tmp/tmp.wRa2U30WK8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.J8cKrJr5NH ++ cat /tmp/tmp.wRa2U30WK8 ++ rm /tmp/tmp.J8cKrJr5NH /tmp/tmp.wRa2U30WK8 ++ return 0 + int_secret_pass=monitor_password + [[ -z monitor_password ]] + [[ monitor_password != \m\o\n\i\t\o\r\_\p\a\s\s\w\o\r\d ]] + [[ monitor != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ monitor ]] + [[ '' =~ monitor ]] + echo 'Running compare for monitor' Running compare for monitor + compare_mysql_cmd select-4 'SHOW TABLES;' '-h 
some-name-proxysql -umonitor -p'\''monitor_password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OfEdo3gYkP +++ mktemp ++ local LAST_ERR=/tmp/tmp.LVdi2viaIp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OfEdo3gYkP ++ cat /tmp/tmp.LVdi2viaIp ++ rm /tmp/tmp.OfEdo3gYkP /tmp/tmp.LVdi2viaIp ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.JFgyIkaORV/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.JFgyIkaORV/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking proxyadmin' Checking proxyadmin ++ getSecretData my-cluster-secrets proxyadmin ++ local secretName=my-cluster-secrets ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.OGM3ewGgWv +++ mktemp ++ local LAST_ERR=/tmp/tmp.NMmvdgIWQG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OGM3ewGgWv ++ cat /tmp/tmp.NMmvdgIWQG ++ rm /tmp/tmp.OGM3ewGgWv /tmp/tmp.NMmvdgIWQG ++ return 0 + secret_pass='F9GCEGP!OedPfBIE?ID' ++ getSecretData internal-some-name proxyadmin ++ local secretName=internal-some-name ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.9BxFyLCYO8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.dZGP88FUi1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9BxFyLCYO8 ++ cat /tmp/tmp.dZGP88FUi1 ++ rm /tmp/tmp.9BxFyLCYO8 /tmp/tmp.dZGP88FUi1 ++ return 0 + int_secret_pass='F9GCEGP!OedPfBIE?ID' + [[ -z F9GCEGP!OedPfBIE?ID ]] + [[ F9GCEGP!OedPfBIE?ID != \F\9\G\C\E\G\P\!\O\e\d\P\f\B\I\E\?\I\D ]] + [[ proxyadmin != \p\r\o\x\y\a\d\m\i\n ]] + [[ proxyadmin == \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ proxyadmin ]] + [[ '' =~ proxyadmin ]] + echo 'Running compare for proxyadmin' Running compare for proxyadmin + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''F9GCEGP!OedPfBIE?ID'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''F9GCEGP!OedPfBIE?ID'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''F9GCEGP!OedPfBIE?ID'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''F9GCEGP!OedPfBIE?ID'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.JFgyIkaORV/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2.sql /tmp/tmp.JFgyIkaORV/select-2.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking operator' Checking operator ++ getSecretData my-cluster-secrets operator ++ local secretName=my-cluster-secrets ++ local dataKey=operator ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.QBGgxajdXN +++ mktemp ++ local LAST_ERR=/tmp/tmp.0bp3ITUxcN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QBGgxajdXN ++ cat /tmp/tmp.0bp3ITUxcN ++ rm /tmp/tmp.QBGgxajdXN /tmp/tmp.0bp3ITUxcN ++ return 0 + secret_pass='7fl%DgMSKv$AU_Bg' ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.Zry60hi9Vh +++ mktemp ++ local LAST_ERR=/tmp/tmp.sqtMbLlj4O ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Zry60hi9Vh ++ cat /tmp/tmp.sqtMbLlj4O ++ rm /tmp/tmp.Zry60hi9Vh /tmp/tmp.sqtMbLlj4O ++ return 0 + int_secret_pass='7fl%DgMSKv$AU_Bg' + [[ -z 7fl%DgMSKv$AU_Bg ]] + [[ 7fl%DgMSKv$AU_Bg != \7\f\l\%\D\g\M\S\K\v\$\A\U\_\B\g ]] + [[ operator != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ operator ]] + [[ '' =~ operator ]] + echo 'Running compare for operator' Running compare for operator + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''7fl%DgMSKv$AU_Bg'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''7fl%DgMSKv$AU_Bg'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''7fl%DgMSKv$AU_Bg'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''7fl%DgMSKv$AU_Bg'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bQwr1juMAA +++ mktemp ++ local LAST_ERR=/tmp/tmp.mAx9b5vd9A ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bQwr1juMAA ++ cat /tmp/tmp.mAx9b5vd9A ++ rm /tmp/tmp.bQwr1juMAA /tmp/tmp.mAx9b5vd9A ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local 
container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.JFgyIkaORV/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.JFgyIkaORV/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking replication' Checking replication ++ getSecretData my-cluster-secrets replication ++ local secretName=my-cluster-secrets ++ local dataKey=replication ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.KqbkgmuUCO +++ mktemp ++ local LAST_ERR=/tmp/tmp.ocHor3RKHF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KqbkgmuUCO ++ cat /tmp/tmp.ocHor3RKHF ++ rm /tmp/tmp.KqbkgmuUCO /tmp/tmp.ocHor3RKHF ++ return 0 + secret_pass='>ood]@jDva=>Ate8d(J' ++ getSecretData internal-some-name replication ++ local secretName=internal-some-name ++ local dataKey=replication ++ kubectl_bin get secrets/internal-some-name '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.fRiaFqbi05 +++ mktemp ++ local LAST_ERR=/tmp/tmp.8zzUU6fMBn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fRiaFqbi05 ++ cat /tmp/tmp.8zzUU6fMBn ++ rm /tmp/tmp.fRiaFqbi05 /tmp/tmp.8zzUU6fMBn ++ return 0 + int_secret_pass='>ood]@jDva=>Ate8d(J' + [[ -z >ood]@jDva=>Ate8d(J ]] + [[ >ood]@jDva=>Ate8d(J != \>\o\o\d\]\@\j\D\v\a\=\>\A\t\e\8\d\(\J ]] + [[ replication != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ replication ]] + [[ '' =~ replication ]] + echo 'Running compare for replication' Running compare for replication + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''>ood]@jDva=>Ate8d(J'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''>ood]@jDva=>Ate8d(J'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''>ood]@jDva=>Ate8d(J'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''>ood]@jDva=>Ate8d(J'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4I1VGkit9u +++ mktemp ++ local LAST_ERR=/tmp/tmp.FAM7l91kKW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4I1VGkit9u ++ cat /tmp/tmp.FAM7l91kKW ++ rm /tmp/tmp.4I1VGkit9u 
/tmp/tmp.FAM7l91kKW ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.JFgyIkaORV/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.JFgyIkaORV/select-4.sql + [[ -n '' ]] + [[ -n '' ]] + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.AumoqehmU4 ++ mktemp + local LAST_ERR=/tmp/tmp.ftrjIGz4qZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.AumoqehmU4 secret/my-cluster-secrets patched + cat /tmp/tmp.ftrjIGz4qZ + rm /tmp/tmp.AumoqehmU4 /tmp/tmp.ftrjIGz4qZ + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1jjThI3VzX +++ mktemp ++ local LAST_ERR=/tmp/tmp.PuPaEkhvbe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1jjThI3VzX ++ cat /tmp/tmp.PuPaEkhvbe ++ rm /tmp/tmp.1jjThI3VzX /tmp/tmp.PuPaEkhvbe ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.JFgyIkaORV/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.JFgyIkaORV/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.NhVmIZpxGY ++ mktemp + local LAST_ERR=/tmp/tmp.BHEEk72bqF + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.NhVmIZpxGY perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.BHEEk72bqF + rm /tmp/tmp.NhVmIZpxGY /tmp/tmp.BHEEk72bqF + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WCeHS4jmJY +++ mktemp ++ local LAST_ERR=/tmp/tmp.p2ZGu8Nm7w ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WCeHS4jmJY ++ cat /tmp/tmp.p2ZGu8Nm7w ++ rm /tmp/tmp.WCeHS4jmJY /tmp/tmp.p2ZGu8Nm7w ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ukBvDyqbVi +++ mktemp ++ local LAST_ERR=/tmp/tmp.itiBs8zFy0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ukBvDyqbVi ++ cat /tmp/tmp.itiBs8zFy0 ++ rm /tmp/tmp.ukBvDyqbVi /tmp/tmp.itiBs8zFy0 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.JPdbaBYku4 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.5KMYCCBQkE +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.JPdbaBYku4 +++++ cat /tmp/tmp.5KMYCCBQkE +++++ rm /tmp/tmp.JPdbaBYku4 /tmp/tmp.5KMYCCBQkE +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.sazpeYhoAx ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.0ExpeW62EK +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.sazpeYhoAx +++++ cat 
/tmp/tmp.0ExpeW62EK +++++ rm /tmp/tmp.sazpeYhoAx /tmp/tmp.0ExpeW62EK +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kY1LGQN44i +++ mktemp ++ local LAST_ERR=/tmp/tmp.pAidd7n2fm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kY1LGQN44i ++ cat /tmp/tmp.pAidd7n2fm ++ rm /tmp/tmp.kY1LGQN44i /tmp/tmp.pAidd7n2fm ++ return 0 + [[ 3 == \3 ]] + echo + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.KckRR99AMe ++ mktemp + local LAST_ERR=/tmp/tmp.x3frR2WrLZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.KckRR99AMe secret/my-cluster-secrets patched + cat /tmp/tmp.x3frR2WrLZ + rm /tmp/tmp.KckRR99AMe /tmp/tmp.x3frR2WrLZ + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pf9lZNbXAf +++ mktemp ++ local LAST_ERR=/tmp/tmp.Zykpeh6UWY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pf9lZNbXAf ++ cat /tmp/tmp.Zykpeh6UWY ++ rm /tmp/tmp.pf9lZNbXAf /tmp/tmp.Zykpeh6UWY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uxdbXjO6wP +++ mktemp ++ local LAST_ERR=/tmp/tmp.zFaG92cWs9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uxdbXjO6wP ++ cat /tmp/tmp.zFaG92cWs9 ++ rm /tmp/tmp.uxdbXjO6wP /tmp/tmp.zFaG92cWs9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.d76tgj40RG +++ mktemp ++ local LAST_ERR=/tmp/tmp.JBOAV04SrS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.d76tgj40RG ++ cat /tmp/tmp.JBOAV04SrS ++ rm /tmp/tmp.d76tgj40RG /tmp/tmp.JBOAV04SrS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
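Before reading a proxy ready count, the trace above runs get_proxy_engine, which decides whether HAProxy or ProxySQL is the active proxy layer by inspecting the cluster spec: .spec.haproxy.enabled comes back empty while .spec.proxysql.enabled is true, so the ready count is taken from .status.proxysql.ready. A minimal sketch of that detection, using the resource name from this run (a rough equivalent, not the helper itself):

if [ "$(kubectl get pxc some-name -o jsonpath='{.spec.haproxy.enabled}')" = "true" ]; then
    echo haproxy    # HAProxy is the active proxy layer
elif [ "$(kubectl get pxc some-name -o jsonpath='{.spec.proxysql.enabled}')" = "true" ]; then
    echo proxysql   # ProxySQL is the active proxy layer, as in this run
fi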
.+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4RXb7RlcIu +++ mktemp ++ local LAST_ERR=/tmp/tmp.PDFc5s9q5C ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4RXb7RlcIu ++ cat /tmp/tmp.PDFc5s9q5C ++ rm /tmp/tmp.4RXb7RlcIu /tmp/tmp.PDFc5s9q5C ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GtK3GsIJnB +++ mktemp ++ local LAST_ERR=/tmp/tmp.CbbFfQfZqt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GtK3GsIJnB ++ cat /tmp/tmp.CbbFfQfZqt ++ rm /tmp/tmp.GtK3GsIJnB /tmp/tmp.CbbFfQfZqt ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.mHhxI3EIax ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.u7SmMA7BL9 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.mHhxI3EIax +++++ cat /tmp/tmp.u7SmMA7BL9 +++++ rm /tmp/tmp.mHhxI3EIax /tmp/tmp.u7SmMA7BL9 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.RKmrK8Amcx ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.WSEsr0Jm0Y +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.RKmrK8Amcx +++++ cat /tmp/tmp.WSEsr0Jm0Y +++++ rm /tmp/tmp.RKmrK8Amcx /tmp/tmp.WSEsr0Jm0Y +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9pNP2nY9yi +++ mktemp ++ local LAST_ERR=/tmp/tmp.SmJsHrlvYk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9pNP2nY9yi ++ cat /tmp/tmp.SmJsHrlvYk ++ rm /tmp/tmp.9pNP2nY9yi /tmp/tmp.SmJsHrlvYk ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] 
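The proxyadmin entry in my-cluster-secrets was patched above with the base64 value dGVzdC1wYXNzd29yZA==, which decodes to test-password, and the cluster has cycled back to ready. The run_mysql_local call that follows (and the two after it for proxysql-1 and proxysql-2) checks that every ProxySQL pod now accepts the new admin credential on its admin port 6032. A rough manual equivalent, with names taken from this run and the compare-file path shortened:

# decode the freshly patched secret and probe one ProxySQL admin interface with it
PASS=$(kubectl get secret my-cluster-secrets -o jsonpath='{.data.proxyadmin}' | base64 -d)
kubectl exec some-name-proxysql-0 -c proxysql -- \
    mysql -h127.0.0.1 -P6032 -uproxyadmin -p"$PASS" -NBe 'SHOW TABLES;' \
    | diff -u e2e-tests/users/compare/select-2.sql -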
+ run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.JFgyIkaORV/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2.sql /tmp/tmp.JFgyIkaORV/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.JFgyIkaORV/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2.sql /tmp/tmp.JFgyIkaORV/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.JFgyIkaORV/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2.sql /tmp/tmp.JFgyIkaORV/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.jyukps7M8z ++ mktemp + local LAST_ERR=/tmp/tmp.MJt01EDNJq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jyukps7M8z perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.MJt01EDNJq + rm /tmp/tmp.jyukps7M8z /tmp/tmp.MJt01EDNJq + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Z2Vb5kCvHl ++ mktemp + local LAST_ERR=/tmp/tmp.Gqx5UXtCm2 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Z2Vb5kCvHl secret/my-cluster-secrets patched + cat /tmp/tmp.Gqx5UXtCm2 + rm /tmp/tmp.Z2Vb5kCvHl /tmp/tmp.Gqx5UXtCm2 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XMxLEWDo23 +++ mktemp ++ local LAST_ERR=/tmp/tmp.aMsJ2Lh35V ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XMxLEWDo23 ++ cat /tmp/tmp.aMsJ2Lh35V ++ rm /tmp/tmp.XMxLEWDo23 /tmp/tmp.aMsJ2Lh35V ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.p2XDcNuC7l +++ mktemp ++ local LAST_ERR=/tmp/tmp.0Zd0FFg5md ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.p2XDcNuC7l ++ cat /tmp/tmp.0Zd0FFg5md ++ rm /tmp/tmp.p2XDcNuC7l /tmp/tmp.0Zd0FFg5md ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
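After the xtrabackup entry is patched (again to the base64 of test-password) and ProxySQL is scaled down to 2, wait_cluster_consistency polls the custom resource until it settles: it reads .status.state until the value is ready, then checks .status.pxc.ready against the expected PXC size (3) and the active proxy's ready count against the expected proxy size (2). The long run of initializing polls that follows is that loop. A condensed sketch of the same check, using the resource name from this run (the real helper also re-resolves the proxy engine on each pass):

until [ "$(kubectl get pxc some-name -o jsonpath='{.status.state}')" = "ready" ]; do
    sleep 5   # the test waits 5s between polls
done
kubectl get pxc some-name -o jsonpath='{.status.pxc.ready} {.status.proxysql.ready}'
# expected here: "3 2" once the resize and the password change have been applied

Once the cluster is consistent again, the trace further down re-runs SHOW DATABASES on some-name-pxc-0 as the xtrabackup user to confirm the rotated password works.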
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lVBTlqqOKk +++ mktemp ++ local LAST_ERR=/tmp/tmp.Stvggw9toT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lVBTlqqOKk ++ cat /tmp/tmp.Stvggw9toT ++ rm /tmp/tmp.lVBTlqqOKk /tmp/tmp.Stvggw9toT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zTnWbM0HLu +++ mktemp ++ local LAST_ERR=/tmp/tmp.z6VTPvYbo0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zTnWbM0HLu ++ cat /tmp/tmp.z6VTPvYbo0 ++ rm /tmp/tmp.zTnWbM0HLu /tmp/tmp.z6VTPvYbo0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5a7dgVw2il +++ mktemp ++ local LAST_ERR=/tmp/tmp.BWze0dLkI1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5a7dgVw2il ++ cat /tmp/tmp.BWze0dLkI1 ++ rm /tmp/tmp.5a7dgVw2il /tmp/tmp.BWze0dLkI1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rKXrtEPe5P +++ mktemp ++ local LAST_ERR=/tmp/tmp.LJAzN30PYe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rKXrtEPe5P ++ cat /tmp/tmp.LJAzN30PYe ++ rm /tmp/tmp.rKXrtEPe5P /tmp/tmp.LJAzN30PYe ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RDUfQDFC1A +++ mktemp ++ local LAST_ERR=/tmp/tmp.xTBB3BGiHQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RDUfQDFC1A ++ cat /tmp/tmp.xTBB3BGiHQ ++ rm /tmp/tmp.RDUfQDFC1A /tmp/tmp.xTBB3BGiHQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rGffUvAkLo +++ mktemp ++ local LAST_ERR=/tmp/tmp.XTxhTv23Cb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rGffUvAkLo ++ cat /tmp/tmp.XTxhTv23Cb ++ rm /tmp/tmp.rGffUvAkLo /tmp/tmp.XTxhTv23Cb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BWOtOa5SI1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.6HvIqcTd3c ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BWOtOa5SI1 ++ cat /tmp/tmp.6HvIqcTd3c ++ rm /tmp/tmp.BWOtOa5SI1 /tmp/tmp.6HvIqcTd3c ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yLkATDYYeQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.ypMsnNF3X8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yLkATDYYeQ ++ cat /tmp/tmp.ypMsnNF3X8 ++ rm /tmp/tmp.yLkATDYYeQ /tmp/tmp.ypMsnNF3X8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4BFWxLAKkO +++ mktemp ++ local LAST_ERR=/tmp/tmp.xMynLUwAQ4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4BFWxLAKkO ++ cat /tmp/tmp.xMynLUwAQ4 ++ rm /tmp/tmp.4BFWxLAKkO /tmp/tmp.xMynLUwAQ4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vPWMXRp5u0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.czGCzI89jQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vPWMXRp5u0 ++ cat /tmp/tmp.czGCzI89jQ ++ rm /tmp/tmp.vPWMXRp5u0 /tmp/tmp.czGCzI89jQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AHfvOfKB37 +++ mktemp ++ local LAST_ERR=/tmp/tmp.TIEGbw68NC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AHfvOfKB37 ++ cat /tmp/tmp.TIEGbw68NC ++ rm /tmp/tmp.AHfvOfKB37 /tmp/tmp.TIEGbw68NC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9DnnydQWy1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.iMTvERazfx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9DnnydQWy1 ++ cat /tmp/tmp.iMTvERazfx ++ rm /tmp/tmp.9DnnydQWy1 /tmp/tmp.iMTvERazfx ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1j2vhyr834 +++ mktemp ++ local LAST_ERR=/tmp/tmp.BYOnmun7fa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1j2vhyr834 ++ cat /tmp/tmp.BYOnmun7fa ++ rm /tmp/tmp.1j2vhyr834 /tmp/tmp.BYOnmun7fa ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.le06RmWmAu +++ mktemp ++ local LAST_ERR=/tmp/tmp.YM7MEjxCo0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.le06RmWmAu ++ cat /tmp/tmp.YM7MEjxCo0 ++ rm /tmp/tmp.le06RmWmAu /tmp/tmp.YM7MEjxCo0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mXlU6BFB47 +++ mktemp ++ local LAST_ERR=/tmp/tmp.VoUM5Go1Sd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mXlU6BFB47 ++ cat /tmp/tmp.VoUM5Go1Sd ++ rm /tmp/tmp.mXlU6BFB47 /tmp/tmp.VoUM5Go1Sd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xqkU2Ndpnu +++ mktemp ++ local LAST_ERR=/tmp/tmp.WJheLVzu4p ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xqkU2Ndpnu ++ cat /tmp/tmp.WJheLVzu4p ++ rm /tmp/tmp.xqkU2Ndpnu /tmp/tmp.WJheLVzu4p ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Kl7sO6dg1X +++ mktemp ++ local LAST_ERR=/tmp/tmp.TOlVe5R3Hd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Kl7sO6dg1X ++ cat /tmp/tmp.TOlVe5R3Hd ++ rm /tmp/tmp.Kl7sO6dg1X /tmp/tmp.TOlVe5R3Hd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pWXcQ41JAx +++ mktemp ++ local LAST_ERR=/tmp/tmp.qJh0hktunH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pWXcQ41JAx ++ cat /tmp/tmp.qJh0hktunH ++ rm /tmp/tmp.pWXcQ41JAx /tmp/tmp.qJh0hktunH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GtK0Wt641Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.Rz0eD58zkp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GtK0Wt641Y ++ cat /tmp/tmp.Rz0eD58zkp ++ rm /tmp/tmp.GtK0Wt641Y /tmp/tmp.Rz0eD58zkp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Z4avDchtBo +++ mktemp ++ local LAST_ERR=/tmp/tmp.NWPKg3p65R ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Z4avDchtBo ++ cat /tmp/tmp.NWPKg3p65R ++ rm /tmp/tmp.Z4avDchtBo /tmp/tmp.NWPKg3p65R ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3gTM3Fw0qz +++ mktemp ++ local LAST_ERR=/tmp/tmp.YHjYs9cEAM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3gTM3Fw0qz ++ cat /tmp/tmp.YHjYs9cEAM ++ rm /tmp/tmp.3gTM3Fw0qz /tmp/tmp.YHjYs9cEAM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 22 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2EnKkSVoJM +++ mktemp ++ local LAST_ERR=/tmp/tmp.pNlwxCYS3m ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2EnKkSVoJM ++ cat /tmp/tmp.pNlwxCYS3m ++ rm /tmp/tmp.2EnKkSVoJM /tmp/tmp.pNlwxCYS3m ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
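For scale: the loop allows up to max=300 iterations with a 5-second sleep between polls (plus the initial 7-second sleep), so wait_cluster_consistency tolerates roughly 25 minutes of initializing before giving up; in this run the cluster returns to ready after about 24 polls, i.e. around two minutes.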
.+ sleep 5 + [[ 23 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WUZdX6CNUM +++ mktemp ++ local LAST_ERR=/tmp/tmp.ANp3vDdZZb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WUZdX6CNUM ++ cat /tmp/tmp.ANp3vDdZZb ++ rm /tmp/tmp.WUZdX6CNUM /tmp/tmp.ANp3vDdZZb ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YuFgNu8WHx +++ mktemp ++ local LAST_ERR=/tmp/tmp.Hx7GC6BRkf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YuFgNu8WHx ++ cat /tmp/tmp.Hx7GC6BRkf ++ rm /tmp/tmp.YuFgNu8WHx /tmp/tmp.Hx7GC6BRkf ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.GXwq6ihGLr ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.UR9IVM3jXn +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.GXwq6ihGLr +++++ cat /tmp/tmp.UR9IVM3jXn +++++ rm /tmp/tmp.GXwq6ihGLr /tmp/tmp.UR9IVM3jXn +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.v7J7bPnAdQ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.i5s6ks57Oe +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.v7J7bPnAdQ +++++ cat /tmp/tmp.i5s6ks57Oe +++++ rm /tmp/tmp.v7J7bPnAdQ /tmp/tmp.i5s6ks57Oe +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cIQlJwXxUf +++ mktemp ++ local LAST_ERR=/tmp/tmp.0eORKHqKrG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cIQlJwXxUf ++ cat /tmp/tmp.0eORKHqKrG ++ rm /tmp/tmp.cIQlJwXxUf /tmp/tmp.0eORKHqKrG ++ return 0 + [[ 2 == \2 ]] + echo + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-3-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW 
DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' -s /tmp/tmp.JFgyIkaORV/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-3.sql /tmp/tmp.JFgyIkaORV/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.nzLtv7cjpn ++ mktemp + local LAST_ERR=/tmp/tmp.q8QNFUMTN9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.nzLtv7cjpn secret/my-cluster-secrets patched + cat /tmp/tmp.q8QNFUMTN9 + rm /tmp/tmp.nzLtv7cjpn /tmp/tmp.q8QNFUMTN9 + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.zb5XuWAK70 +++ mktemp ++ local LAST_ERR=/tmp/tmp.QyV7qxSHEn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zb5XuWAK70 ++ cat /tmp/tmp.QyV7qxSHEn ++ rm /tmp/tmp.zb5XuWAK70 /tmp/tmp.QyV7qxSHEn ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + retry=0 + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep additional_password + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lCvZnk6qgL +++ mktemp ++ local LAST_ERR=/tmp/tmp.o35dpx6vHV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lCvZnk6qgL ++ cat /tmp/tmp.o35dpx6vHV ++ rm /tmp/tmp.lCvZnk6qgL /tmp/tmp.o35dpx6vHV ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met 
waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace {"additional_password": "*C1F414D9BAF378B656A849B31F9F8AF3125F558B"} + retry=0 + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nXtuC7Cw5k +++ mktemp ++ local LAST_ERR=/tmp/tmp.BtfBNFF8GX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nXtuC7Cw5k ++ cat /tmp/tmp.BtfBNFF8GX ++ rm /tmp/tmp.nXtuC7Cw5k /tmp/tmp.BtfBNFF8GX ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 1 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3ch1syhnQj +++ mktemp ++ local LAST_ERR=/tmp/tmp.HblXjQ7iyT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3ch1syhnQj ++ cat /tmp/tmp.HblXjQ7iyT ++ rm /tmp/tmp.3ch1syhnQj /tmp/tmp.HblXjQ7iyT ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 2 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h 
some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PqQbIx4M8T +++ mktemp ++ local LAST_ERR=/tmp/tmp.9qWYkShVmb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PqQbIx4M8T ++ cat /tmp/tmp.9qWYkShVmb ++ rm /tmp/tmp.PqQbIx4M8T /tmp/tmp.9qWYkShVmb ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 3 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.A3aajyu2YJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.303vNw4dmm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.A3aajyu2YJ ++ cat /tmp/tmp.303vNw4dmm ++ rm /tmp/tmp.A3aajyu2YJ /tmp/tmp.303vNw4dmm ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 4 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ 
kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HmRWKsd3pv +++ mktemp ++ local LAST_ERR=/tmp/tmp.4lylnpt3gz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HmRWKsd3pv ++ cat /tmp/tmp.4lylnpt3gz ++ rm /tmp/tmp.HmRWKsd3pv /tmp/tmp.4lylnpt3gz ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 5 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rnkkuYI3Ac +++ mktemp ++ local LAST_ERR=/tmp/tmp.z0Ipvm8TKX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rnkkuYI3Ac ++ cat /tmp/tmp.z0Ipvm8TKX ++ rm /tmp/tmp.rnkkuYI3Ac /tmp/tmp.z0Ipvm8TKX ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 6 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ludr2wigAx +++ mktemp ++ local LAST_ERR=/tmp/tmp.P4dN0xl7Wf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ 
exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ludr2wigAx ++ cat /tmp/tmp.P4dN0xl7Wf ++ rm /tmp/tmp.Ludr2wigAx /tmp/tmp.P4dN0xl7Wf ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 7 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.B8atFxopq9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.EpZbC1Ftnm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.B8atFxopq9 ++ cat /tmp/tmp.EpZbC1Ftnm ++ rm /tmp/tmp.B8atFxopq9 /tmp/tmp.EpZbC1Ftnm ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 8 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.z1VkOsxyYq +++ mktemp ++ local LAST_ERR=/tmp/tmp.emlU3ZQ0DB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.z1VkOsxyYq ++ cat /tmp/tmp.emlU3ZQ0DB ++ rm /tmp/tmp.z1VkOsxyYq /tmp/tmp.emlU3ZQ0DB ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk 
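The monitor rotation is verified through MySQL 8.0's dual-password mechanism rather than a fixed sleep: right after the patch, is_password_updated confirms that mysql.user.User_attributes for monitor carries an additional_password entry (the previous password retained as a secondary credential while the change rolls out), and is_old_password_discarded then polls until that attribute is back to NULL, i.e. the old password has been discarded. A rough equivalent of what each poll runs, issued through the client pod from this trace (pod name and root password taken from this run; the helper wraps this in kubectl_bin with retries):

kubectl exec pxc-client-59944c5bbf-sk4jk -- \
    mysql -h some-name-pxc -uroot -p'test-password' -NBe \
    "SELECT User_attributes FROM mysql.user WHERE user='monitor'"
# prints {"additional_password": "<hash>"} while both passwords are still valid,
# and NULL once the old one has been discarded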
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 9 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iby85k85zG +++ mktemp ++ local LAST_ERR=/tmp/tmp.xaxmeGiFqD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iby85k85zG ++ cat /tmp/tmp.xaxmeGiFqD ++ rm /tmp/tmp.iby85k85zG /tmp/tmp.xaxmeGiFqD ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 10 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.o5cmwreSr7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.1n9sXePSiW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.o5cmwreSr7 ++ cat /tmp/tmp.1n9sXePSiW ++ rm /tmp/tmp.o5cmwreSr7 /tmp/tmp.1n9sXePSiW ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting 
for password propagation + sleep 1 + let retry+=1 + [[ 11 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sBmMu8Lysu +++ mktemp ++ local LAST_ERR=/tmp/tmp.AUrrTkH3xf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sBmMu8Lysu ++ cat /tmp/tmp.AUrrTkH3xf ++ rm /tmp/tmp.sBmMu8Lysu /tmp/tmp.AUrrTkH3xf ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 12 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3GqpWra019 +++ mktemp ++ local LAST_ERR=/tmp/tmp.2oLvMJvBNc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3GqpWra019 ++ cat /tmp/tmp.2oLvMJvBNc ++ rm /tmp/tmp.3GqpWra019 /tmp/tmp.2oLvMJvBNc ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 13 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot 
-p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jc7NrkVz5G +++ mktemp ++ local LAST_ERR=/tmp/tmp.YYLWXHR2t0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jc7NrkVz5G ++ cat /tmp/tmp.YYLWXHR2t0 ++ rm /tmp/tmp.jc7NrkVz5G /tmp/tmp.YYLWXHR2t0 ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace NULL + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZplzL861d7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.F6k0TtpCxw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZplzL861d7 ++ cat /tmp/tmp.F6k0TtpCxw ++ rm /tmp/tmp.ZplzL861d7 /tmp/tmp.F6k0TtpCxw ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bIhAuBWRzZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.89GP9re0j7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bIhAuBWRzZ ++ cat /tmp/tmp.89GP9re0j7 ++ rm /tmp/tmp.bIhAuBWRzZ /tmp/tmp.89GP9re0j7 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.QM06mDc2p5 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ydzqAEUALv +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.QM06mDc2p5 +++++ cat /tmp/tmp.ydzqAEUALv +++++ rm /tmp/tmp.QM06mDc2p5 /tmp/tmp.ydzqAEUALv +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.GCsXnfyirO ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.oeWmzZFfnd +++++ local 
exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.GCsXnfyirO +++++ cat /tmp/tmp.oeWmzZFfnd +++++ rm /tmp/tmp.GCsXnfyirO /tmp/tmp.oeWmzZFfnd +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0XHVBXT9oc +++ mktemp ++ local LAST_ERR=/tmp/tmp.W0hbWqEdpn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0XHVBXT9oc ++ cat /tmp/tmp.W0hbWqEdpn ++ rm /tmp/tmp.0XHVBXT9oc /tmp/tmp.W0hbWqEdpn ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SfaDIbGxG0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.qmKFOosmVj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SfaDIbGxG0 ++ cat /tmp/tmp.qmKFOosmVj ++ rm /tmp/tmp.SfaDIbGxG0 /tmp/tmp.qmKFOosmVj ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.JFgyIkaORV/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.JFgyIkaORV/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.9NSevIXbjs ++ mktemp + local LAST_ERR=/tmp/tmp.Lm316ugF9n + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.9NSevIXbjs secret/my-cluster-secrets patched + cat /tmp/tmp.Lm316ugF9n + rm /tmp/tmp.9NSevIXbjs /tmp/tmp.Lm316ugF9n + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.u5sl4pDZlv +++ mktemp ++ local LAST_ERR=/tmp/tmp.DS8pOcqpap ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.u5sl4pDZlv ++ cat /tmp/tmp.DS8pOcqpap ++ rm /tmp/tmp.u5sl4pDZlv /tmp/tmp.DS8pOcqpap ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
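
Every wait_cluster_consistency block in this trace follows the same pattern: poll the PXC custom resource's .status.state every five seconds (up to 300 attempts), and only once it reports ready compare the .status.pxc.ready and .status.proxysql.ready counters against the expected sizes (3 and 2 here). A minimal standalone sketch of that polling loop, reconstructed from the trace only; the function name is illustrative, and the real helper additionally resolves whether HAProxy or ProxySQL is the active proxy before reading the proxy counter:

    # poll pxc/<cluster> until .status.state is "ready", then verify member counts
    wait_pxc_ready() {
        local cluster=$1 pxc_size=$2 proxy_size=$3
        local i=0 max=300
        while [[ "$(kubectl get pxc "$cluster" -o 'jsonpath={.status.state}')" != "ready" ]]; do
            if [[ $i -ge $max ]]; then
                echo "timeout waiting for pxc/$cluster to become ready" >&2
                return 1
            fi
            echo -n .
            sleep 5
            i=$((i + 1))
        done
        # the ready counters must match the requested cluster and proxy sizes
        [[ "$(kubectl get pxc "$cluster" -o 'jsonpath={.status.pxc.ready}')" == "$pxc_size" ]] &&
            [[ "$(kubectl get pxc "$cluster" -o 'jsonpath={.status.proxysql.ready}')" == "$proxy_size" ]]
    }
    # e.g. wait_pxc_ready some-name 3 2, mirroring the call traced above
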
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1HgDj6Ltc9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.CvcKrQmzP7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1HgDj6Ltc9 ++ cat /tmp/tmp.CvcKrQmzP7 ++ rm /tmp/tmp.1HgDj6Ltc9 /tmp/tmp.CvcKrQmzP7 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6VeePFtgpQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.vPkiukMUqz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6VeePFtgpQ ++ cat /tmp/tmp.vPkiukMUqz ++ rm /tmp/tmp.6VeePFtgpQ /tmp/tmp.vPkiukMUqz ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.VYBkuDVuT4 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.cRzsIJj9xV +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.VYBkuDVuT4 +++++ cat /tmp/tmp.cRzsIJj9xV +++++ rm /tmp/tmp.VYBkuDVuT4 /tmp/tmp.cRzsIJj9xV +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.DSwetCmw7x ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.3zrnfzIWoE +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.DSwetCmw7x +++++ cat /tmp/tmp.3zrnfzIWoE +++++ rm /tmp/tmp.DSwetCmw7x /tmp/tmp.3zrnfzIWoE +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CUhHROpq1e +++ mktemp ++ local LAST_ERR=/tmp/tmp.JzbVW5jXpD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CUhHROpq1e ++ cat /tmp/tmp.JzbVW5jXpD ++ rm /tmp/tmp.CUhHROpq1e /tmp/tmp.JzbVW5jXpD ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql ]] + 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oKywsoA1FD +++ mktemp ++ local LAST_ERR=/tmp/tmp.bYUKNpO9ZP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oKywsoA1FD ++ cat /tmp/tmp.bYUKNpO9ZP ++ rm /tmp/tmp.oKywsoA1FD /tmp/tmp.bYUKNpO9ZP ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.JFgyIkaORV/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.JFgyIkaORV/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.QVYFOl1BXS ++ mktemp + local LAST_ERR=/tmp/tmp.765VY88zPm + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QVYFOl1BXS perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.765VY88zPm + rm /tmp/tmp.QVYFOl1BXS /tmp/tmp.765VY88zPm + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1k9m7YPm4g +++ mktemp ++ local LAST_ERR=/tmp/tmp.X5R0UifnCf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1k9m7YPm4g ++ cat /tmp/tmp.X5R0UifnCf ++ rm /tmp/tmp.1k9m7YPm4g /tmp/tmp.X5R0UifnCf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
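
The select-4 comparisons that recur in this trace (compare_mysql_cmd) pick a version-specific expected file when the image under test matches 8.0 and a select-4-80.sql variant exists, capture the SHOW TABLES output from the client pod, and fail on any diff. A hedged sketch of that flow; the function and argument names are illustrative, and run_mysql stands in for the traced helper that executes mysql inside the client pod:

    compare_select() {
        # args: id query uri image compare_dir out_dir (illustrative names)
        local id=$1 query=$2 uri=$3 image=$4 compare_dir=$5 out_dir=$6
        local expected="$compare_dir/$id.sql"
        # prefer the 8.0-specific expected file when testing a pxc8.0 image
        if [[ $image =~ 8\.0 && -f "$compare_dir/$id-80.sql" ]]; then
            expected="$compare_dir/$id-80.sql"
        fi
        run_mysql "$query" "$uri" > "$out_dir/$id.sql"
        [[ -s "$out_dir/$id.sql" ]] || return 1   # empty output means the query never ran
        diff -u "$expected" "$out_dir/$id.sql"
    }
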
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ViXp1wzYIp +++ mktemp ++ local LAST_ERR=/tmp/tmp.PnsDljRnRm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ViXp1wzYIp ++ cat /tmp/tmp.PnsDljRnRm ++ rm /tmp/tmp.ViXp1wzYIp /tmp/tmp.PnsDljRnRm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8iwsefm3EJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.RdEPvdnnaE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8iwsefm3EJ ++ cat /tmp/tmp.RdEPvdnnaE ++ rm /tmp/tmp.8iwsefm3EJ /tmp/tmp.RdEPvdnnaE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NsrpmqbdOS +++ mktemp ++ local LAST_ERR=/tmp/tmp.AUPiEzmQ8i ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NsrpmqbdOS ++ cat /tmp/tmp.AUPiEzmQ8i ++ rm /tmp/tmp.NsrpmqbdOS /tmp/tmp.AUPiEzmQ8i ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.b6uFpHqhSn +++ mktemp ++ local LAST_ERR=/tmp/tmp.isHrS0eVCE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.b6uFpHqhSn ++ cat /tmp/tmp.isHrS0eVCE ++ rm /tmp/tmp.b6uFpHqhSn /tmp/tmp.isHrS0eVCE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Nc4X8zm7wO +++ mktemp ++ local LAST_ERR=/tmp/tmp.leDvDPW5rP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Nc4X8zm7wO ++ cat /tmp/tmp.leDvDPW5rP ++ rm /tmp/tmp.Nc4X8zm7wO /tmp/tmp.leDvDPW5rP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zZ3hTOz0W3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.5ubVNZyvfJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zZ3hTOz0W3 ++ cat /tmp/tmp.5ubVNZyvfJ ++ rm /tmp/tmp.zZ3hTOz0W3 /tmp/tmp.5ubVNZyvfJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.L2SYMKPzpJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ycn0Ff24jk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.L2SYMKPzpJ ++ cat /tmp/tmp.Ycn0Ff24jk ++ rm /tmp/tmp.L2SYMKPzpJ /tmp/tmp.Ycn0Ff24jk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.q0zqKH934Z +++ mktemp ++ local LAST_ERR=/tmp/tmp.OkcgvN08Rv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.q0zqKH934Z ++ cat /tmp/tmp.OkcgvN08Rv ++ rm /tmp/tmp.q0zqKH934Z /tmp/tmp.OkcgvN08Rv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GrMwU8nXBY +++ mktemp ++ local LAST_ERR=/tmp/tmp.3ysaKp1V7D ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GrMwU8nXBY ++ cat /tmp/tmp.3ysaKp1V7D ++ rm /tmp/tmp.GrMwU8nXBY /tmp/tmp.3ysaKp1V7D ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.t9PN5EwR5x +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ff5ElqLdFW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.t9PN5EwR5x ++ cat /tmp/tmp.Ff5ElqLdFW ++ rm /tmp/tmp.t9PN5EwR5x /tmp/tmp.Ff5ElqLdFW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OjzrW8ynPT +++ mktemp ++ local LAST_ERR=/tmp/tmp.GSqPVNNaEz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OjzrW8ynPT ++ cat /tmp/tmp.GSqPVNNaEz ++ rm /tmp/tmp.OjzrW8ynPT /tmp/tmp.GSqPVNNaEz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Jo36pJ2LgM +++ mktemp ++ local LAST_ERR=/tmp/tmp.YZcTckC6DX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Jo36pJ2LgM ++ cat /tmp/tmp.YZcTckC6DX ++ rm /tmp/tmp.Jo36pJ2LgM /tmp/tmp.YZcTckC6DX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mkKpWLoQYY +++ mktemp ++ local LAST_ERR=/tmp/tmp.G2lFD03Cf2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mkKpWLoQYY ++ cat /tmp/tmp.G2lFD03Cf2 ++ rm /tmp/tmp.mkKpWLoQYY /tmp/tmp.G2lFD03Cf2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jkGm4POTl7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ipzjrmGycN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jkGm4POTl7 ++ cat /tmp/tmp.ipzjrmGycN ++ rm /tmp/tmp.jkGm4POTl7 /tmp/tmp.ipzjrmGycN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5skeClZZO9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.UcAgCJOcos ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5skeClZZO9 ++ cat /tmp/tmp.UcAgCJOcos ++ rm /tmp/tmp.5skeClZZO9 /tmp/tmp.UcAgCJOcos ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WDatPhiGA2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.aWeVFPod7z ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WDatPhiGA2 ++ cat /tmp/tmp.aWeVFPod7z ++ rm /tmp/tmp.WDatPhiGA2 /tmp/tmp.aWeVFPod7z ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EDkHyDqf6R +++ mktemp ++ local LAST_ERR=/tmp/tmp.rauVqA51Jp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EDkHyDqf6R ++ cat /tmp/tmp.rauVqA51Jp ++ rm /tmp/tmp.EDkHyDqf6R /tmp/tmp.rauVqA51Jp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TQok56jmK8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.OOLwLTrWLl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TQok56jmK8 ++ cat /tmp/tmp.OOLwLTrWLl ++ rm /tmp/tmp.TQok56jmK8 /tmp/tmp.OOLwLTrWLl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nTizoXrPnS +++ mktemp ++ local LAST_ERR=/tmp/tmp.WyabLIGMUH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nTizoXrPnS ++ cat /tmp/tmp.WyabLIGMUH ++ rm /tmp/tmp.nTizoXrPnS /tmp/tmp.WyabLIGMUH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eRFsf1zuud +++ mktemp ++ local LAST_ERR=/tmp/tmp.QEoVjf5snh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eRFsf1zuud ++ cat /tmp/tmp.QEoVjf5snh ++ rm /tmp/tmp.eRFsf1zuud /tmp/tmp.QEoVjf5snh ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.r6iWOk7omg +++ mktemp ++ local LAST_ERR=/tmp/tmp.KMMoBbxUEn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.r6iWOk7omg ++ cat /tmp/tmp.KMMoBbxUEn ++ rm /tmp/tmp.r6iWOk7omg /tmp/tmp.KMMoBbxUEn ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pEhK8l9oyv +++ mktemp ++ local LAST_ERR=/tmp/tmp.cmU2OM7gnt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pEhK8l9oyv ++ cat /tmp/tmp.cmU2OM7gnt ++ rm /tmp/tmp.pEhK8l9oyv /tmp/tmp.cmU2OM7gnt ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.CiOiuesJPS ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Jw81C4A8BH +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.CiOiuesJPS +++++ cat /tmp/tmp.Jw81C4A8BH +++++ rm /tmp/tmp.CiOiuesJPS /tmp/tmp.Jw81C4A8BH +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.zgtKpeuxN4 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.HxQ2GBVJoa +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.zgtKpeuxN4 +++++ cat /tmp/tmp.HxQ2GBVJoa +++++ rm /tmp/tmp.zgtKpeuxN4 /tmp/tmp.HxQ2GBVJoa +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local 
LAST_OUT=/tmp/tmp.5Apr4HjlvO +++ mktemp ++ local LAST_ERR=/tmp/tmp.eqOYmlF9WB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5Apr4HjlvO ++ cat /tmp/tmp.eqOYmlF9WB ++ rm /tmp/tmp.5Apr4HjlvO /tmp/tmp.eqOYmlF9WB ++ return 0 + [[ 2 == \2 ]] + echo + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.jQubT2a7Ol ++ mktemp + local LAST_ERR=/tmp/tmp.7LBw7Pqgjn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jQubT2a7Ol secret/my-cluster-secrets-2 patched + cat /tmp/tmp.7LBw7Pqgjn + rm /tmp/tmp.jQubT2a7Ol /tmp/tmp.7LBw7Pqgjn + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ThLWlB3SsY +++ mktemp ++ local LAST_ERR=/tmp/tmp.jjoGqTRmvc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ThLWlB3SsY ++ cat /tmp/tmp.jjoGqTRmvc ++ rm /tmp/tmp.ThLWlB3SsY /tmp/tmp.jjoGqTRmvc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5VpcoULFey +++ mktemp ++ local LAST_ERR=/tmp/tmp.k5jGWuaujv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5VpcoULFey ++ cat /tmp/tmp.k5jGWuaujv ++ rm /tmp/tmp.5VpcoULFey /tmp/tmp.k5jGWuaujv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
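
The "test new operator" step just traced reduces to base64-encoding the new password and patching that single key in the second Secret; the operator then notices the changed Secret and rolls the credential out, which is why the test sleeps and re-enters the readiness wait. The same calls, extracted from the trace:

    new_pass=test-password2
    encoded=$(echo -n "$new_pass" | base64)          # dGVzdC1wYXNzd29yZDI=
    kubectl patch secret my-cluster-secrets-2 \
        -p="{\"data\":{\"operator\": \"$encoded\"}}"
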
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tyWNWqrld8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.5IvnlhnmfR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tyWNWqrld8 ++ cat /tmp/tmp.5IvnlhnmfR ++ rm /tmp/tmp.tyWNWqrld8 /tmp/tmp.5IvnlhnmfR ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pm8I0PQgNQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.DgIjaHIeTA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pm8I0PQgNQ ++ cat /tmp/tmp.DgIjaHIeTA ++ rm /tmp/tmp.pm8I0PQgNQ /tmp/tmp.DgIjaHIeTA ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.BapOUhvouj ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Lj1hYLZFjV +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.BapOUhvouj +++++ cat /tmp/tmp.Lj1hYLZFjV +++++ rm /tmp/tmp.BapOUhvouj /tmp/tmp.Lj1hYLZFjV +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.dBJiVgRt6B ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Dl1CTzmwm0 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.dBJiVgRt6B +++++ cat /tmp/tmp.Dl1CTzmwm0 +++++ rm /tmp/tmp.dBJiVgRt6B /tmp/tmp.Dl1CTzmwm0 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uo5hl8Wz7M +++ mktemp ++ local LAST_ERR=/tmp/tmp.r8I6KHCDPW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uo5hl8Wz7M ++ cat /tmp/tmp.r8I6KHCDPW ++ rm /tmp/tmp.uo5hl8Wz7M /tmp/tmp.r8I6KHCDPW ++ return 0 + [[ 2 == \2 ]] + echo + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql ]] + 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pxrS6WRtDe +++ mktemp ++ local LAST_ERR=/tmp/tmp.bpxh0xm19O ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pxrS6WRtDe ++ cat /tmp/tmp.bpxh0xm19O ++ rm /tmp/tmp.pxrS6WRtDe /tmp/tmp.bpxh0xm19O ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.JFgyIkaORV/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.JFgyIkaORV/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.jfXWxnfpGj +++ mktemp ++ local LAST_ERR=/tmp/tmp.39SsgCm8TT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jfXWxnfpGj ++ cat /tmp/tmp.39SsgCm8TT ++ rm /tmp/tmp.jfXWxnfpGj /tmp/tmp.39SsgCm8TT ++ return 0 + newpass='(C?]Msc?E' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''(C?]Msc?E'\'';' '-h some-name-pxc -uroot -p'\''(C?]Msc?E'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''(C?]Msc?E'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''(C?]Msc?E'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.h1sNkMywH2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.fC2aSKvDhE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.h1sNkMywH2 ++ cat /tmp/tmp.fC2aSKvDhE ++ rm /tmp/tmp.h1sNkMywH2 /tmp/tmp.fC2aSKvDhE ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for 
pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''(C?]Msc?E'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''(C?]Msc?E'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''(C?]Msc?E'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''(C?]Msc?E'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4XH03obTlG +++ mktemp ++ local LAST_ERR=/tmp/tmp.4GaES6VYs4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4XH03obTlG ++ cat /tmp/tmp.4GaES6VYs4 ++ rm /tmp/tmp.4XH03obTlG /tmp/tmp.4GaES6VYs4 ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.JFgyIkaORV/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.JFgyIkaORV/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.sFxTiv4ECX +++ mktemp ++ local LAST_ERR=/tmp/tmp.E6pxJ8Ofyk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sFxTiv4ECX ++ cat /tmp/tmp.E6pxJ8Ofyk ++ rm /tmp/tmp.sFxTiv4ECX /tmp/tmp.E6pxJ8Ofyk ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.V4WrMYLnU8 ++ mktemp + local LAST_ERR=/tmp/tmp.Vr2TiwcBjU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.V4WrMYLnU8 secret/my-cluster-secrets-2 configured + cat /tmp/tmp.Vr2TiwcBjU Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
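
Both getSecretData lookups in this trace (root from my-cluster-secrets-2, then operator from internal-some-name) are nothing more than a Go-template field read followed by a base64 decode. A minimal equivalent, with an illustrative function name:

    get_secret_data() {
        local secret=$1 key=$2
        kubectl get "secrets/$secret" --template="{{.data.$key}}" | base64 --decode
    }
    # e.g. get_secret_data internal-some-name operator   -> test-password2 in this run
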
+ rm /tmp/tmp.V4WrMYLnU8 /tmp/tmp.Vr2TiwcBjU + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YFmN7gP0PI +++ mktemp ++ local LAST_ERR=/tmp/tmp.JBXD4JzIuB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YFmN7gP0PI ++ cat /tmp/tmp.JBXD4JzIuB ++ rm /tmp/tmp.YFmN7gP0PI /tmp/tmp.JBXD4JzIuB ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.JFgyIkaORV/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.JFgyIkaORV/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.ethE4IQFws + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' ++ mktemp + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2154-7a623b10#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-24200~ + local LAST_ERR=/tmp/tmp.SZQJ2A0I9e + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ethE4IQFws perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.SZQJ2A0I9e + rm /tmp/tmp.ethE4IQFws /tmp/tmp.SZQJ2A0I9e + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.STFiQb9a5i +++ mktemp ++ local LAST_ERR=/tmp/tmp.uFAWyPLLJs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.STFiQb9a5i ++ cat /tmp/tmp.uFAWyPLLJs ++ rm /tmp/tmp.STFiQb9a5i /tmp/tmp.uFAWyPLLJs ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
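
The apply_config/cat_config step above rewrites the cluster manifest on the fly before applying it: the apiVersion is pinned to pxc.percona.com/v1 and every image: field is swapped for the image under test, then the result is streamed straight into kubectl. A trimmed sketch showing only two of the substitutions seen in the trace (the real pipeline also rewrites the haproxy, proxysql, backup, init, logcollector and pmm images and the minio-service namespace); the function name is illustrative:

    render_and_apply() {
        local manifest=$1 pxc_image=$2
        sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
            -e "s#image:.*/percona-xtradb-cluster:.*\$#image: $pxc_image#" \
            "$manifest" | kubectl apply -f -
    }
    # render_and_apply conf/some-name.yml perconalab/percona-xtradb-cluster-operator:main-pxc8.0
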
.[ wait loop iterations with counter values 0 through 47 repeat identically: sleep 5, check the counter against the 300-iteration limit, let i+=1, run kubectl get pxc some-name -o 'jsonpath={.status.state}' through kubectl_bin with mktemp-captured output, read back "initializing", and echo -n "." ]
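The loop above is the core of wait_cluster_consistency: poll .status.state every 5 seconds, give up after max=300 iterations, and only once the state is "ready" go on to compare .status.pxc.ready and .status.haproxy.ready against the expected sizes. A minimal sketch of that polling pattern, assuming an illustrative helper name rather than the test's exact function:

# illustrative helper: poll the PXC custom resource until .status.state reports "ready"
wait_pxc_ready() {
  local cluster=$1
  local max=${2:-300}
  local i=0
  until [[ "$(kubectl get pxc "$cluster" -o jsonpath='{.status.state}')" == "ready" ]]; do
    if [[ $i -ge $max ]]; then
      echo "timeout waiting for pxc/$cluster to become ready" >&2
      return 1
    fi
    sleep 5
    i=$((i + 1))
    echo -n .
  done
}

In the trace that resumes below, the state flips to "ready" at counter 51, after which the harness confirms that 3 PXC members and 3 HAProxy members report ready.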
.+ sleep 5 + [[ 48 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZpGvzoB7Dk +++ mktemp ++ local LAST_ERR=/tmp/tmp.JF5Fm5uBqJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZpGvzoB7Dk ++ cat /tmp/tmp.JF5Fm5uBqJ ++ rm /tmp/tmp.ZpGvzoB7Dk /tmp/tmp.JF5Fm5uBqJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 49 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GgEfMTlnBN +++ mktemp ++ local LAST_ERR=/tmp/tmp.ChrNpsn06D ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GgEfMTlnBN ++ cat /tmp/tmp.ChrNpsn06D ++ rm /tmp/tmp.GgEfMTlnBN /tmp/tmp.ChrNpsn06D ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 50 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EkwoHqOrJ5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.jDF44a6IVJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EkwoHqOrJ5 ++ cat /tmp/tmp.jDF44a6IVJ ++ rm /tmp/tmp.EkwoHqOrJ5 /tmp/tmp.jDF44a6IVJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 51 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Koz4vOLRTg +++ mktemp ++ local LAST_ERR=/tmp/tmp.as58BRvQKU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Koz4vOLRTg ++ cat /tmp/tmp.as58BRvQKU ++ rm /tmp/tmp.Koz4vOLRTg /tmp/tmp.as58BRvQKU ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wMLujX6QcU +++ mktemp ++ local LAST_ERR=/tmp/tmp.pT8UJVEycc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wMLujX6QcU ++ cat /tmp/tmp.pT8UJVEycc ++ rm /tmp/tmp.wMLujX6QcU /tmp/tmp.pT8UJVEycc ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.hC4v9OYQDW ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.QM4RI5Fu99 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.hC4v9OYQDW +++++ cat /tmp/tmp.QM4RI5Fu99 +++++ rm /tmp/tmp.hC4v9OYQDW /tmp/tmp.QM4RI5Fu99 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xqmxDYaYIG +++ 
mktemp ++ local LAST_ERR=/tmp/tmp.nnhCr6snBR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xqmxDYaYIG ++ cat /tmp/tmp.nnhCr6snBR ++ rm /tmp/tmp.xqmxDYaYIG /tmp/tmp.nnhCr6snBR ++ return 0 + [[ 3 == \3 ]] + echo + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WBvQtaXxSb +++ mktemp ++ local LAST_ERR=/tmp/tmp.gUsL93QLgR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WBvQtaXxSb ++ cat /tmp/tmp.gUsL93QLgR ++ rm /tmp/tmp.WBvQtaXxSb /tmp/tmp.gUsL93QLgR ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.yevmLj1IzV ++ mktemp + local LAST_ERR=/tmp/tmp.5ePsUdxIBY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.yevmLj1IzV secret/my-cluster-secrets patched + cat /tmp/tmp.5ePsUdxIBY + rm /tmp/tmp.yevmLj1IzV /tmp/tmp.5ePsUdxIBY + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5D2f379dbI +++ mktemp ++ local LAST_ERR=/tmp/tmp.C5VE0Hetox ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5D2f379dbI ++ cat /tmp/tmp.C5VE0Hetox ++ rm /tmp/tmp.5D2f379dbI /tmp/tmp.C5VE0Hetox ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kQAqbAp16k +++ mktemp ++ local LAST_ERR=/tmp/tmp.Xw2Q386jfF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kQAqbAp16k ++ cat /tmp/tmp.Xw2Q386jfF ++ rm /tmp/tmp.kQAqbAp16k /tmp/tmp.Xw2Q386jfF ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.t08JHLYE2I 
++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.91R5EV4YpY +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.t08JHLYE2I +++++ cat /tmp/tmp.91R5EV4YpY +++++ rm /tmp/tmp.t08JHLYE2I /tmp/tmp.91R5EV4YpY +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fLTpu7XJ1j +++ mktemp ++ local LAST_ERR=/tmp/tmp.dmMQhk632N ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fLTpu7XJ1j ++ cat /tmp/tmp.dmMQhk632N ++ rm /tmp/tmp.fLTpu7XJ1j /tmp/tmp.dmMQhk632N ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-3-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HDcDU62MyJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.8V31Dt9LIj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HDcDU62MyJ ++ cat /tmp/tmp.8V31Dt9LIj ++ rm /tmp/tmp.HDcDU62MyJ /tmp/tmp.8V31Dt9LIj ++ return 0 + client_pod=pxc-client-59944c5bbf-sk4jk + wait_pod pxc-client-59944c5bbf-sk4jk + local pod=pxc-client-59944c5bbf-sk4jk + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-sk4jk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-sk4jk condition met waiting for pod/pxc-client-59944c5bbf-sk4jk to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.JFgyIkaORV/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-3.sql /tmp/tmp.JFgyIkaORV/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.V39addrmrJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.8MpBHLktja ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.V39addrmrJ ++ cat /tmp/tmp.8MpBHLktja ++ rm /tmp/tmp.V39addrmrJ /tmp/tmp.8MpBHLktja ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + destroy users-24200 + local namespace=users-24200 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v 'the object has been modified' ++ get_operator_pod + grep -v 'get backup status: Job.batch' ++ local label_prefix=app.kubernetes.io/ + sort -u + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + grep -v level=info + tee /tmp/tmp.JFgyIkaORV/operator.log +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.dZAwGOWkfQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.yM9HS4ovZA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dZAwGOWkfQ ++ cat /tmp/tmp.yM9HS4ovZA ++ rm /tmp/tmp.dZAwGOWkfQ /tmp/tmp.yM9HS4ovZA ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-6cf85965f9-74zrm ++ mktemp + local LAST_OUT=/tmp/tmp.PJFBmoOJcM ++ mktemp + local LAST_ERR=/tmp/tmp.AaNxiYC8na + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-6cf85965f9-74zrm + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PJFBmoOJcM + cat /tmp/tmp.AaNxiYC8na + rm /tmp/tmp.PJFBmoOJcM /tmp/tmp.AaNxiYC8na + return 0 } }, }, { }, }, { }, }, ""), }, { }, }, }, - }, - { - }, - { - }, - }, + }, + "0057a39240d85a1b609b6123fdb5844700a7da5d97688c6fe463738d3b583", - "13ef286399d060209cd9d602a7db3194424d1e28f51a707d093a77944843a9bb", + "13ef286399d060209cd9d602a7db3194424d1e28f51a707d093a77944843a9bb", ... // 16 identical fields ... 
// 16 identical fields 2025-11-19T18:46:54.049Z INFO setup Manager starting up {"gitCommit": "7a623b10a97567887377e516f24d3500d7412fc7", "gitBranch": "PR-2154-7a623b10", "buildTime": "2025-11-19T16:30:53Z", "goVersion": "go1.25.4", "os": "linux", "arch": "amd64"} 2025-11-19T18:46:54.049Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.13-gke.1377000"} 2025-11-19T18:46:54.052Z INFO setup Registering Components. 2025-11-19T18:46:54.427Z INFO controller-runtime.metrics Starting metrics server 2025-11-19T18:46:54.427Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-11-19T18:46:54.427Z INFO setup Starting the Cmd. 2025-11-19T18:46:54.428Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-11-19T18:46:54.428Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-11-19T18:46:54.428Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-11-19T18:46:54.428Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-11-19T18:46:54.428Z INFO controller-runtime.webhook Starting webhook server 2025-11-19T18:46:54.428Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-11-19T18:46:54.529Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 2025-11-19T18:46:54.557Z DEBUG events percona-xtradb-cluster-operator-6cf85965f9-74zrm_27d083e5-dde0-48ad-8433-33412aef067b became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"2fe0dcde-b1b8-4f41-9463-f87ea6ffc77d","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1763578014551519009"}, "reason": "LeaderElection"} 2025-11-19T18:46:54.557Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2025-11-19T18:46:54.558Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-11-19T18:46:54.558Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-11-19T18:46:54.558Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.Secret"} 2025-11-19T18:46:54.558Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-11-19T18:46:54.659Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2025-11-19T18:46:54.659Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2025-11-19T18:46:54.760Z INFO Starting Controller {"controller": "pxc-controller"} 2025-11-19T18:46:54.760Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2025-11-19T18:46:54.760Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2025-11-19T18:46:54.760Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2025-11-19T18:47:32.386Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "9fc2df3c-50f4-4868-9e0c-0a6ee96a5b40", "version": "1.19.0"} 2025-11-19T18:47:32.610Z INFO User secrets updated {"controller": "pxc-controller", "namespace": "users-24200", 
"name": "some-name", "reconcileID": "9fc2df3c-50f4-4868-9e0c-0a6ee96a5b40", "secrets": "my-cluster-secrets"} 2025-11-19T18:47:32.826Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "9fc2df3c-50f4-4868-9e0c-0a6ee96a5b40", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-11-19T18:47:32.845Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "9fc2df3c-50f4-4868-9e0c-0a6ee96a5b40", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-11-19T18:47:33.401Z ERROR Reconciler error {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "9fc2df3c-50f4-4868-9e0c-0a6ee96a5b40", "error": "failed to deploy proxysql: updatePod for proxysql: reconcile config: reconcile autotune config: create or update configmap: configmaps \"auto-some-name-pxc\" already exists", "errorVerbose": "configmaps \"auto-some-name-pxc\" already exists\ncreate or update configmap\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileAutotuneConfigMap\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:92\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:25\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nreconcile autotune 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:27\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nreconcile 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:54\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nupdatePod for proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:578\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nfailed to deploy 
proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:596\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-19T18:47:33.506Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "07c69e54-8e5c-4b14-b8de-98ea31f2c242", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-19T18:47:33.552Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "07c69e54-8e5c-4b14-b8de-98ea31f2c242", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-19T18:47:33.595Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "07c69e54-8e5c-4b14-b8de-98ea31f2c242", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-19T18:47:33.631Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "07c69e54-8e5c-4b14-b8de-98ea31f2c242", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-19T18:47:33.687Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "07c69e54-8e5c-4b14-b8de-98ea31f2c242", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-19T18:47:33.790Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "07c69e54-8e5c-4b14-b8de-98ea31f2c242", "object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-19T18:47:34.286Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "a5afd876-455c-4872-949f-9ccea32eb2c4", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-11-19T18:47:34.304Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "a5afd876-455c-4872-949f-9ccea32eb2c4", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-11-19T18:48:45.522Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-24200", "name": 
"some-name", "reconcileID": "e478e8e2-0848-44b0-ae4f-01058614c352", "user": "operator"} 2025-11-19T18:48:45.557Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e478e8e2-0848-44b0-ae4f-01058614c352", "user": "monitor"} 2025-11-19T18:48:45.604Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e478e8e2-0848-44b0-ae4f-01058614c352"} 2025-11-19T18:48:45.640Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e478e8e2-0848-44b0-ae4f-01058614c352"} 2025-11-19T18:48:45.672Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e478e8e2-0848-44b0-ae4f-01058614c352", "user": "xtrabackup"} 2025-11-19T18:48:45.708Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e478e8e2-0848-44b0-ae4f-01058614c352"} 2025-11-19T18:48:45.747Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e478e8e2-0848-44b0-ae4f-01058614c352", "user": "replication"} 2025-11-19T18:48:45.756Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e478e8e2-0848-44b0-ae4f-01058614c352", "err": "get primary pxc pod: not found"} 2025-11-19T18:48:50.476Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "d950e9db-498a-42a0-8386-e6ec90530694", "err": "get primary pxc pod: not found"} 2025-11-19T18:48:55.621Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "9b9e8d6b-8205-4028-bffc-1573937b8e90", "err": "get primary pxc pod: not found"} 2025-11-19T18:49:00.753Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "8184fdb0-e875-4b94-a24e-399a844a6560", "err": "get primary pxc pod: not found"} 2025-11-19T18:51:06.786Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "d2b94c94-94b9-4bce-b90d-a6e28ad92507", "user": "root"} 2025-11-19T18:51:06.896Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "d2b94c94-94b9-4bce-b90d-a6e28ad92507", "new version": "8.0.43-34.1"} 2025-11-19T18:51:08.590Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "d2b94c94-94b9-4bce-b90d-a6e28ad92507"} 2025-11-19T18:51:13.685Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "b91aaf09-b40b-4818-9718-d45849929753"} 2025-11-19T18:51:18.879Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "7cb253f8-3771-4a00-ad8b-5d1f2c4e44d2"} 2025-11-19T18:51:24.177Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e60406d9-cd7c-4255-8f54-9c2b34b9bdea"} 
2025-11-19T18:51:29.688Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e494783f-b911-4ac2-b437-e689784b47bd"} 2025-11-19T18:51:34.825Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "b6c6e37e-f271-42e3-b3a4-fe6c627f3257"} 2025-11-19T18:51:40.186Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "06271875-7a9b-4b5f-85c5-34a9cf626b18"} 2025-11-19T18:51:45.577Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "a19c0bc0-b256-4adc-82bd-e7eaa1b30052"} 2025-11-19T18:51:50.876Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "6abde7e7-f533-494a-afcb-8220f2518f57"} 2025-11-19T18:51:56.191Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "6df4e2da-ad3b-4e6c-a393-c2594dfb7101"} 2025-11-19T18:52:01.088Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "3ccbf6dd-8713-4615-ba3e-c95b4bbe4006"} 2025-11-19T18:52:06.582Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "a427d7a5-b1d5-4547-aa0b-5c4b15e6a824"} 2025-11-19T18:52:12.194Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e3e2da4e-cfc4-4c80-9409-87d3045dd0aa"} 2025-11-19T18:52:17.387Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "6e5d8f82-5eb8-4d4b-96e6-63dbe8778312"} 2025-11-19T18:52:22.719Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "3a05a19a-6b10-45ae-9306-4aa689bcea62"} 2025-11-19T18:52:28.020Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "4fe41654-dadb-4116-b32c-f3b767fe6f8f"} 2025-11-19T18:52:33.192Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "90fe080e-3f57-43d8-8f9b-1a4f3d90485a"} 2025-11-19T18:52:38.395Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "f4cc11fe-3984-4694-81fb-e8de11132233"} 2025-11-19T18:52:39.107Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "65b35271-b6a3-437c-8bc6-b5bd0f07c63b", "user": "root"} 2025-11-19T18:52:39.127Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "65b35271-b6a3-437c-8bc6-b5bd0f07c63b", "user": "root"} 2025-11-19T18:52:39.152Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "65b35271-b6a3-437c-8bc6-b5bd0f07c63b", "secret": "some-name-mysql-init", "user": "root"} 2025-11-19T18:52:41.298Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": 
"users-24200", "name": "some-name", "reconcileID": "65b35271-b6a3-437c-8bc6-b5bd0f07c63b"} 2025-11-19T18:52:41.324Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "65b35271-b6a3-437c-8bc6-b5bd0f07c63b", "user": "root"} 2025-11-19T18:52:41.342Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "65b35271-b6a3-437c-8bc6-b5bd0f07c63b", "user": "root"} 2025-11-19T18:52:43.194Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "65b35271-b6a3-437c-8bc6-b5bd0f07c63b"} 2025-11-19T18:52:48.983Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "3c517846-0ec5-435f-bfc9-4f1d53f121a1"} 2025-11-19T18:52:54.479Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "aae87363-cd81-4eb5-b130-8ca4f9b9500b"} 2025-11-19T18:52:59.794Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "decfdee9-40c3-470b-9cd5-89daee20e391", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T18:52:59.818Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "f75cd646-dc2d-4bcd-93e1-595f47bf5457"} 2025-11-19T18:52:59.852Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "decfdee9-40c3-470b-9cd5-89daee20e391", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T18:53:24.286Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "dffc2a64-7427-46bb-b791-8e85f2593854", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in 
ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-19T18:53:24.869Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "64f5ea35-8d16-4bb8-9705-0bd239d87248", "user": "proxyadmin"} 2025-11-19T18:53:24.869Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "64f5ea35-8d16-4bb8-9705-0bd239d87248", "user": "proxyadmin"} 2025-11-19T18:53:24.898Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "64f5ea35-8d16-4bb8-9705-0bd239d87248", "user": "proxyadmin"} 2025-11-19T18:53:24.922Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "64f5ea35-8d16-4bb8-9705-0bd239d87248", "user": "proxyadmin"} 2025-11-19T18:53:24.922Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "64f5ea35-8d16-4bb8-9705-0bd239d87248", "last-applied-secret": "37445234508b80c372ee8f606ad658d43860d550eb9686b73610eb9605fc3826"} 2025-11-19T18:53:24.926Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "64f5ea35-8d16-4bb8-9705-0bd239d87248", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T18:53:24.978Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "64f5ea35-8d16-4bb8-9705-0bd239d87248", "err": "get primary pxc pod: not found"} 2025-11-19T18:53:26.716Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "64f5ea35-8d16-4bb8-9705-0bd239d87248", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-19T18:54:05.926Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "de4ce043-77dd-452e-a402-b07eb491b49a"} 2025-11-19T18:54:11.047Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "7ca8ebd2-f171-4141-ab5a-1dd52b924cbc"} 2025-11-19T18:54:11.712Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "f333c3bf-03d7-4c5f-8b38-c0897c6a3db7", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T18:54:11.753Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "f333c3bf-03d7-4c5f-8b38-c0897c6a3db7", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T18:54:13.796Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "f333c3bf-03d7-4c5f-8b38-c0897c6a3db7"} 2025-11-19T18:54:13.999Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "c2d80fd8-913d-49ff-8263-9be96b600e58", "user": "xtrabackup"} 2025-11-19T18:54:14.013Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "c2d80fd8-913d-49ff-8263-9be96b600e58", "user": "xtrabackup"} 2025-11-19T18:54:14.036Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "c2d80fd8-913d-49ff-8263-9be96b600e58", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-11-19T18:54:14.057Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "c2d80fd8-913d-49ff-8263-9be96b600e58", "user": "xtrabackup"} 2025-11-19T18:54:14.070Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "c2d80fd8-913d-49ff-8263-9be96b600e58", "user": "xtrabackup"} 2025-11-19T18:54:14.076Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "c2d80fd8-913d-49ff-8263-9be96b600e58", "last-applied-secret": "feb68e0e2d6ccd0946569222441c5199bf690516002015e16b4d3fd6662061f9"} 2025-11-19T18:54:14.079Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "c2d80fd8-913d-49ff-8263-9be96b600e58", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, 
"metaChanged": true} 2025-11-19T18:54:16.496Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "c2d80fd8-913d-49ff-8263-9be96b600e58"} 2025-11-19T18:55:21.957Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "de3207d8-7cce-4b94-9a95-c5777c40d67e", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-24200 on 34.118.224.10:53: no such host"} 2025-11-19T18:56:14.217Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "5c4cf554-3763-4513-80a9-218154e09d5c", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-24200 on 34.118.224.10:53: no such host"} 2025-11-19T18:56:19.389Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "6d2d27cd-bd47-472c-bffb-4ef99bf56392", "primary name": "some-name-pxc-0.some-name-pxc.users-24200.svc.cluster.local"} 2025-11-19T18:56:24.512Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "5213cda5-f530-4f70-9a69-f388e9a2fc99", "primary name": "some-name-pxc-0.some-name-pxc.users-24200.svc.cluster.local"} 2025-11-19T18:56:29.649Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "5915b9ed-d661-4cbb-8fc5-6f135a93be71", "primary name": "some-name-pxc-0.some-name-pxc.users-24200.svc.cluster.local"} 2025-11-19T18:56:34.776Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "9a81eeef-9234-46d6-958e-30f96aa8d89e", "primary name": "some-name-pxc-0.some-name-pxc.users-24200.svc.cluster.local"} 2025-11-19T18:56:39.951Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "9dd709c6-20e1-4d61-bd79-c896045411bb", "primary name": "some-name-pxc-0.some-name-pxc.users-24200.svc.cluster.local"} 2025-11-19T18:56:45.111Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "b35b0338-de59-4b59-bce0-ac356da610c2", "primary name": "some-name-pxc-0.some-name-pxc.users-24200.svc.cluster.local"} 2025-11-19T18:56:52.836Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "dfbdd406-08bf-402c-87c1-b6b6176f22a9"} 2025-11-19T18:56:57.312Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "ae0f4420-bd88-4982-8e4f-11a534327dff", "user": "monitor"} 2025-11-19T18:56:57.324Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "ae0f4420-bd88-4982-8e4f-11a534327dff", "user": "monitor"} 2025-11-19T18:56:57.342Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "ae0f4420-bd88-4982-8e4f-11a534327dff", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-19T18:56:57.360Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "ae0f4420-bd88-4982-8e4f-11a534327dff", "user": "monitor"} 2025-11-19T18:56:57.379Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "ae0f4420-bd88-4982-8e4f-11a534327dff", "user": "monitor"} 2025-11-19T18:56:57.851Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "ae0f4420-bd88-4982-8e4f-11a534327dff", "last-applied-secret": "13ef286399d060209cd9d602a7db3194424d1e28f51a707d093a77944843a9bb"} 2025-11-19T18:56:57.855Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "ae0f4420-bd88-4982-8e4f-11a534327dff", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T18:56:59.554Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "70bd42bd-f315-426f-a370-e176e5487d3e", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-24200.svc.cluster.local:3306) to ProxySQL\nRemoving existing user from ProxySQL: monitor\nAdding user to ProxySQL: monitor\n Added query rule for user: monitor\n / ERROR 2026 (HY000): SSL connection error: error:0A000126:SSL routines::unexpected eof while reading\n", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-24200.svc.cluster.local:3306) to ProxySQL\nRemoving existing user from ProxySQL: monitor\nAdding user to ProxySQL: monitor\n Added query rule for user: monitor\n / ERROR 2026 (HY000): SSL connection error: error:0A000126:SSL routines::unexpected eof while 
reading\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-19T18:57:34.491Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0ddf2aff-73a7-4d33-a42a-6289562522b2", "user": "monitor"}
2025-11-19T18:57:36.564Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0ddf2aff-73a7-4d33-a42a-6289562522b2"}
2025-11-19T18:57:42.556Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0cd3ed5c-449e-4a8c-aff7-d3423ab61125", "user": "monitor"}
2025-11-19T18:57:44.551Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0cd3ed5c-449e-4a8c-aff7-d3423ab61125"}
2025-11-19T18:57:47.550Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "ef8a3424-c505-4e71-90b3-637e0533f7d3", "user": "monitor"}
2025-11-19T18:57:49.548Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "ef8a3424-c505-4e71-90b3-637e0533f7d3"}
2025-11-19T18:57:53.094Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "8d82e7af-d08c-426b-80c2-1f309cc53ed8", "user": "monitor"}
2025-11-19T18:57:55.159Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "8d82e7af-d08c-426b-80c2-1f309cc53ed8"}
2025-11-19T18:57:58.896Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "cb36433a-6d69-4c1b-83e0-7a9a9d24df35", "user": "monitor"}
2025-11-19T18:57:59.772Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "cb36433a-6d69-4c1b-83e0-7a9a9d24df35", "user": "monitor"}
2025-11-19T18:57:59.786Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "cb36433a-6d69-4c1b-83e0-7a9a9d24df35", "last-applied-secret": "13ef286399d060209cd9d602a7db3194424d1e28f51a707d093a77944843a9bb"}
2025-11-19T18:58:01.559Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "cb36433a-6d69-4c1b-83e0-7a9a9d24df35"}
2025-11-19T18:58:06.844Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "f4fadde7-bef3-488e-8611-e58431305f5f"}
2025-11-19T18:58:12.158Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID":
"36a46853-00ae-4589-84bb-8280ed505f55"} 2025-11-19T18:58:17.043Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "d180cf58-cc1d-47c1-a703-eff4d6e538ad"} 2025-11-19T18:58:22.761Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "1919c5fa-6170-4b1b-8c8e-b9ba025b5593"} 2025-11-19T18:58:26.722Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e0071205-d018-4313-b613-02aa57ee2941", "user": "operator"} 2025-11-19T18:58:26.734Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e0071205-d018-4313-b613-02aa57ee2941", "user": "operator"} 2025-11-19T18:58:26.751Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e0071205-d018-4313-b613-02aa57ee2941", "secret": "some-name-mysql-init", "user": "operator"} 2025-11-19T18:58:26.775Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e0071205-d018-4313-b613-02aa57ee2941", "user": "operator"} 2025-11-19T18:58:26.787Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e0071205-d018-4313-b613-02aa57ee2941", "user": "operator"} 2025-11-19T18:58:26.806Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e0071205-d018-4313-b613-02aa57ee2941", "last-applied-secret": "9bcab9413f9c1312dd1b648fe4efa170b92813633c2e751a62b75ee2ea44c229"} 2025-11-19T18:58:26.810Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e0071205-d018-4313-b613-02aa57ee2941", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T18:58:29.980Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "7530f788-b90a-4ff3-b6a4-274b8f559323", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' 
(using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' 
(using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-19T18:58:55.222Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "248c6a98-ed01-4e6e-a5b3-d25f53f0254d"} 2025-11-19T18:58:59.930Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "17c1c5e0-29e0-4fe3-bc2e-a8f975d7e392"} 2025-11-19T18:59:05.035Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "51e85a6d-7a83-4717-8c18-2b7d84ae7fd1"} 2025-11-19T18:59:10.413Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "2e711f55-788f-413e-8916-059a70faa387"} 2025-11-19T18:59:11.965Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "secrets": "my-cluster-secrets-2"} 2025-11-19T18:59:11.971Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "user": "root"} 2025-11-19T18:59:11.991Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "user": "root"} 2025-11-19T18:59:12.014Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "secret": "some-name-mysql-init", "user": "root"} 2025-11-19T18:59:14.388Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960"} 2025-11-19T18:59:14.411Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "user": "root"} 2025-11-19T18:59:14.431Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "user": "root"} 2025-11-19T18:59:14.438Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "user": "operator"} 2025-11-19T18:59:14.450Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": 
"e09333ba-7e32-4bc5-ab32-d23132bc5960", "user": "operator"} 2025-11-19T18:59:14.469Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "secret": "some-name-mysql-init", "user": "operator"} 2025-11-19T18:59:14.487Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "user": "operator"} 2025-11-19T18:59:14.500Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "user": "operator"} 2025-11-19T18:59:14.507Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "user": "monitor"} 2025-11-19T18:59:14.519Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "user": "monitor"} 2025-11-19T18:59:14.536Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-19T18:59:14.554Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "user": "monitor"} 2025-11-19T18:59:14.573Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "user": "monitor"} 2025-11-19T18:59:14.878Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "user": "xtrabackup"} 2025-11-19T18:59:14.891Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "user": "xtrabackup"} 2025-11-19T18:59:14.910Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-11-19T18:59:14.942Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "user": "xtrabackup"} 2025-11-19T18:59:14.955Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "user": "xtrabackup"} 2025-11-19T18:59:14.960Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "user": "replication"} 2025-11-19T18:59:14.974Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "user": "replication"} 2025-11-19T18:59:14.991Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "secret": 
"some-name-mysql-init", "user": "replication"} 2025-11-19T18:59:15.017Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "user": "replication"} 2025-11-19T18:59:15.033Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "user": "replication"} 2025-11-19T18:59:15.033Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "user": "proxyadmin"} 2025-11-19T18:59:15.050Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "user": "proxyadmin"} 2025-11-19T18:59:15.071Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "user": "proxyadmin"} 2025-11-19T18:59:15.071Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "last-applied-secret": "5746a4758a36fb798bf250367e30bdb16ddb3c87cd6deb3ed8f3a724a9321867"} 2025-11-19T18:59:15.071Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "last-applied-secret": "5746a4758a36fb798bf250367e30bdb16ddb3c87cd6deb3ed8f3a724a9321867"} 2025-11-19T18:59:15.073Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T18:59:15.137Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T18:59:16.705Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e09333ba-7e32-4bc5-ab32-d23132bc5960", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-19T18:59:32.454Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "7f196557-af75-47f2-ac6b-fe9eb79786ab", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-2: dial tcp: lookup some-name-pxc-2.some-name-pxc.users-24200 on 34.118.224.10:53: no such host"} 2025-11-19T19:00:09.009Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e4def9de-58d0-4cac-9326-8de49c23d1d0", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-24200 on 34.118.224.10:53: no such host"} 2025-11-19T19:00:14.190Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e0407acd-c432-4eab-9c90-b02ceda3508a", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-24200 on 34.118.224.10:53: no such host"} 2025-11-19T19:01:11.832Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "9a018ce5-3c84-48fc-be48-e621236d1590", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-24200 on 34.118.224.10:53: no such host"} 2025-11-19T19:01:17.067Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e1809225-ee63-4cfb-91f8-36f38aa3ddda", "primary name": "some-name-pxc-0.some-name-pxc.users-24200.svc.cluster.local"} 2025-11-19T19:01:22.187Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "17c09030-e79f-4ee3-968d-7c399320df21", "primary name": "some-name-pxc-0.some-name-pxc.users-24200.svc.cluster.local"} 2025-11-19T19:01:27.325Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "26eeb14f-e7ab-43d8-9251-6370b2faf175", "primary name": "some-name-pxc-0.some-name-pxc.users-24200.svc.cluster.local"} 2025-11-19T19:01:32.618Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "b303ddfb-cf75-4142-9fa7-6f5daca54567", "primary name": "some-name-pxc-0.some-name-pxc.users-24200.svc.cluster.local"} 2025-11-19T19:01:37.751Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "003d41ff-e6f6-4db1-91df-628cf10678b9", "primary name": "some-name-pxc-0.some-name-pxc.users-24200.svc.cluster.local"} 2025-11-19T19:01:42.926Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "b3b17cf5-034e-4340-916b-bc4e08b06ca4", "primary name": "some-name-pxc-0.some-name-pxc.users-24200.svc.cluster.local"} 2025-11-19T19:01:48.936Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "f46d7f2c-4d80-49a8-ac0e-f857eea003a9", "user": "monitor"} 2025-11-19T19:01:49.786Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "f46d7f2c-4d80-49a8-ac0e-f857eea003a9", "user": "monitor"} 2025-11-19T19:01:49.799Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "f46d7f2c-4d80-49a8-ac0e-f857eea003a9", "last-applied-secret": "5746a4758a36fb798bf250367e30bdb16ddb3c87cd6deb3ed8f3a724a9321867"} 2025-11-19T19:01:51.608Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "f46d7f2c-4d80-49a8-ac0e-f857eea003a9"} 2025-11-19T19:01:55.710Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "f551bff3-dac8-487f-9d78-714dfe61afb6"} 2025-11-19T19:01:56.745Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "6e3d51ed-a71d-4ce8-8bbb-8a11de779e57", "user": "operator"} 2025-11-19T19:01:56.759Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "6e3d51ed-a71d-4ce8-8bbb-8a11de779e57", "user": "operator"} 2025-11-19T19:01:56.784Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "6e3d51ed-a71d-4ce8-8bbb-8a11de779e57", "secret": "some-name-mysql-init", "user": "operator"} 2025-11-19T19:01:56.800Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "6e3d51ed-a71d-4ce8-8bbb-8a11de779e57", "user": "operator"} 2025-11-19T19:01:56.813Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "6e3d51ed-a71d-4ce8-8bbb-8a11de779e57", "user": "operator"} 2025-11-19T19:01:56.832Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "6e3d51ed-a71d-4ce8-8bbb-8a11de779e57", "last-applied-secret": "22efc40bc46d6a1e32ae38c97463028d9da0cacd17c87f6ee2a0ca962767cda0"} 2025-11-19T19:01:56.835Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "6e3d51ed-a71d-4ce8-8bbb-8a11de779e57", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T19:02:00.648Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": 
"6e3d51ed-a71d-4ce8-8bbb-8a11de779e57", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' 
(using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-24200.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-19T19:02:30.305Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "215d2adc-174b-4b4b-a9c0-1f37d4595992"} 2025-11-19T19:02:34.599Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "1109890c-76c8-4ade-bed2-c82f9b8e6e98"} 2025-11-19T19:02:39.986Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0173d618-40ce-43a7-91ab-90898daedee5"} 2025-11-19T19:02:45.223Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "b41aa986-a043-4423-91f4-a30941cb28ea"} 2025-11-19T19:02:50.508Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "2a440a99-2145-4f40-b9fe-3d5d185d0e8f"} 2025-11-19T19:02:55.602Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "be1a7266-0b10-4874-bc36-2cdfee90baf3"} 2025-11-19T19:03:01.507Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": 
"some-name", "reconcileID": "c8d43ee5-1f03-4494-aff1-0044e15b7bb7"} 2025-11-19T19:03:06.613Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "b9e709f6-76f6-4514-bd45-87dd9d3125fc"} 2025-11-19T19:03:11.912Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "988472a6-a9f1-4599-a9b7-1de020cbf32a"} 2025-11-19T19:03:17.210Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "be3b2723-a70e-4366-bd04-93cea8ee4b0b"} 2025-11-19T19:03:22.513Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "65130503-c509-4734-bbbd-5bfd4d61f750"} 2025-11-19T19:03:27.816Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "c1c6bb60-c6cd-41e5-ba18-55db754066f6"} 2025-11-19T19:03:33.125Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "15c99c80-ed9e-4046-9724-e7c0593daa7c"} 2025-11-19T19:03:38.207Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "ae137d43-4fee-46a3-b76f-0b88ead6a775"} 2025-11-19T19:03:43.737Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "60c271e1-d8d7-4fd2-990e-d4f094daccf1"} 2025-11-19T19:03:44.875Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "977690e2-7fb3-4d17-b522-a469838b80e3", "user": "root"} 2025-11-19T19:03:44.903Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "977690e2-7fb3-4d17-b522-a469838b80e3", "user": "root"} 2025-11-19T19:03:44.927Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "977690e2-7fb3-4d17-b522-a469838b80e3", "secret": "some-name-mysql-init", "user": "root"} 2025-11-19T19:03:47.115Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "977690e2-7fb3-4d17-b522-a469838b80e3"} 2025-11-19T19:03:47.141Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "977690e2-7fb3-4d17-b522-a469838b80e3", "user": "root"} 2025-11-19T19:03:47.161Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "977690e2-7fb3-4d17-b522-a469838b80e3", "user": "root"} 2025-11-19T19:03:47.175Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "977690e2-7fb3-4d17-b522-a469838b80e3", "user": "monitor"} 2025-11-19T19:03:47.188Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "977690e2-7fb3-4d17-b522-a469838b80e3", "user": "monitor"} 2025-11-19T19:03:47.208Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "977690e2-7fb3-4d17-b522-a469838b80e3", "secret": 
"some-name-mysql-init", "user": "monitor"} 2025-11-19T19:03:47.224Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "977690e2-7fb3-4d17-b522-a469838b80e3", "user": "monitor"} 2025-11-19T19:03:47.248Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "977690e2-7fb3-4d17-b522-a469838b80e3", "user": "monitor"} 2025-11-19T19:03:47.525Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "977690e2-7fb3-4d17-b522-a469838b80e3", "user": "xtrabackup"} 2025-11-19T19:03:47.537Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "977690e2-7fb3-4d17-b522-a469838b80e3", "user": "xtrabackup"} 2025-11-19T19:03:47.559Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "977690e2-7fb3-4d17-b522-a469838b80e3", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-11-19T19:03:47.580Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "977690e2-7fb3-4d17-b522-a469838b80e3", "user": "xtrabackup"} 2025-11-19T19:03:47.593Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "977690e2-7fb3-4d17-b522-a469838b80e3", "user": "xtrabackup"} 2025-11-19T19:03:47.601Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "977690e2-7fb3-4d17-b522-a469838b80e3", "user": "proxyadmin"} 2025-11-19T19:03:47.619Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "977690e2-7fb3-4d17-b522-a469838b80e3", "user": "proxyadmin"} 2025-11-19T19:03:47.638Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "977690e2-7fb3-4d17-b522-a469838b80e3", "user": "proxyadmin"} 2025-11-19T19:03:47.638Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "977690e2-7fb3-4d17-b522-a469838b80e3", "last-applied-secret": "6c50057a39240d85a1b609b6123fdb5844700a7da5d97688c6fe463738d3b583"} 2025-11-19T19:03:47.638Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "977690e2-7fb3-4d17-b522-a469838b80e3", "last-applied-secret": "6c50057a39240d85a1b609b6123fdb5844700a7da5d97688c6fe463738d3b583"} 2025-11-19T19:03:47.641Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "977690e2-7fb3-4d17-b522-a469838b80e3", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T19:03:47.705Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "977690e2-7fb3-4d17-b522-a469838b80e3", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T19:03:49.470Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": 
"977690e2-7fb3-4d17-b522-a469838b80e3", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-19T19:04:04.552Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "43765a5b-cbf8-4fee-9078-898792b326a5", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-19T19:04:04.626Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "43765a5b-cbf8-4fee-9078-898792b326a5", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"}
2025-11-19T19:04:04.689Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "43765a5b-cbf8-4fee-9078-898792b326a5", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"}
2025-11-19T19:04:04.755Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "43765a5b-cbf8-4fee-9078-898792b326a5", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
2025-11-19T19:04:04.839Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "43765a5b-cbf8-4fee-9078-898792b326a5", "object": "some-name-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
2025-11-19T19:04:05.720Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "b548dd61-fc27-4431-9b1d-cf5a5202edf8", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"}
2025-11-19T19:05:48.562Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "e3290387-4463-4bab-b1fa-e59814e30cde", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.171.176.68:33062: i/o timeout"}
2025-11-19T19:06:24.983Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "7b4ac9ee-d896-4631-a8c5-f78f4aae54f0", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-24200 on 34.118.224.10:53: no such host"}
2025-11-19T19:07:04.934Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0dad01f5-23c7-4fbf-bf75-df83f2f79abc", "user": "root"}
2025-11-19T19:07:04.951Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0dad01f5-23c7-4fbf-bf75-df83f2f79abc", "user": "root"}
2025-11-19T19:07:04.968Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID":
"0dad01f5-23c7-4fbf-bf75-df83f2f79abc", "secret": "some-name-mysql-init", "user": "root"} 2025-11-19T19:07:04.991Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0dad01f5-23c7-4fbf-bf75-df83f2f79abc", "user": "root"} 2025-11-19T19:07:05.009Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0dad01f5-23c7-4fbf-bf75-df83f2f79abc", "user": "root"} 2025-11-19T19:07:05.014Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0dad01f5-23c7-4fbf-bf75-df83f2f79abc", "user": "operator"} 2025-11-19T19:07:05.025Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0dad01f5-23c7-4fbf-bf75-df83f2f79abc", "user": "operator"} 2025-11-19T19:07:05.046Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0dad01f5-23c7-4fbf-bf75-df83f2f79abc", "secret": "some-name-mysql-init", "user": "operator"} 2025-11-19T19:07:05.068Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0dad01f5-23c7-4fbf-bf75-df83f2f79abc", "user": "operator"} 2025-11-19T19:07:05.080Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0dad01f5-23c7-4fbf-bf75-df83f2f79abc", "user": "operator"} 2025-11-19T19:07:05.085Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0dad01f5-23c7-4fbf-bf75-df83f2f79abc", "user": "monitor"} 2025-11-19T19:07:05.097Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0dad01f5-23c7-4fbf-bf75-df83f2f79abc", "user": "monitor"} 2025-11-19T19:07:05.124Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0dad01f5-23c7-4fbf-bf75-df83f2f79abc", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-19T19:07:05.144Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0dad01f5-23c7-4fbf-bf75-df83f2f79abc", "user": "monitor"} 2025-11-19T19:07:05.466Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0dad01f5-23c7-4fbf-bf75-df83f2f79abc", "user": "xtrabackup"} 2025-11-19T19:07:05.478Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0dad01f5-23c7-4fbf-bf75-df83f2f79abc", "user": "xtrabackup"} 2025-11-19T19:07:05.497Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0dad01f5-23c7-4fbf-bf75-df83f2f79abc", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-11-19T19:07:05.514Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0dad01f5-23c7-4fbf-bf75-df83f2f79abc", "user": "xtrabackup"} 2025-11-19T19:07:05.526Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0dad01f5-23c7-4fbf-bf75-df83f2f79abc", 
"user": "xtrabackup"} 2025-11-19T19:07:05.532Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0dad01f5-23c7-4fbf-bf75-df83f2f79abc", "user": "replication"} 2025-11-19T19:07:05.544Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0dad01f5-23c7-4fbf-bf75-df83f2f79abc", "user": "replication"} 2025-11-19T19:07:05.573Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0dad01f5-23c7-4fbf-bf75-df83f2f79abc", "secret": "some-name-mysql-init", "user": "replication"} 2025-11-19T19:07:05.594Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0dad01f5-23c7-4fbf-bf75-df83f2f79abc", "user": "replication"} 2025-11-19T19:07:05.605Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0dad01f5-23c7-4fbf-bf75-df83f2f79abc", "user": "replication"} 2025-11-19T19:07:05.605Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0dad01f5-23c7-4fbf-bf75-df83f2f79abc", "last-applied-secret": "9bcab9413f9c1312dd1b648fe4efa170b92813633c2e751a62b75ee2ea44c229"} 2025-11-19T19:07:05.607Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "0dad01f5-23c7-4fbf-bf75-df83f2f79abc", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-19T19:08:07.168Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "6ad3e694-f734-4481-94e0-d97d0b8a6531", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.171.176.72:33062: connect: connection refused"} 2025-11-19T19:08:50.279Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "b6c4a466-e88a-4f50-99e4-65c71256d232", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: invalid connection"} 2025-11-19T19:09:27.181Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "4643246c-c4d8-4db3-b73c-efff2fa7b1d3", "user": "monitor"} 2025-11-19T19:09:28.278Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "4643246c-c4d8-4db3-b73c-efff2fa7b1d3", "user": "monitor"} 2025-11-19T19:09:34.086Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "a9b0a18f-f3d0-4295-a966-ffe980a339ea", "user": "monitor"} 2025-11-19T19:09:34.097Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "a9b0a18f-f3d0-4295-a966-ffe980a339ea", "user": "monitor"} 2025-11-19T19:09:34.118Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "a9b0a18f-f3d0-4295-a966-ffe980a339ea", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-19T19:09:34.140Z INFO Internal secrets updated {"controller": "pxc-controller", 
"namespace": "users-24200", "name": "some-name", "reconcileID": "a9b0a18f-f3d0-4295-a966-ffe980a339ea", "user": "monitor"} 2025-11-19T19:09:37.429Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "d9ed7a48-6eaa-4bdd-849c-ecf72f2c62cc", "user": "monitor"} 2025-11-19T19:09:43.097Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "bcc9ce9d-425f-4b48-a582-dd61c5e7d949", "user": "monitor"} 2025-11-19T19:09:48.688Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "869b6b8b-11cc-437f-b353-de5f5d24daa7", "user": "monitor"} 2025-11-19T19:09:54.303Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "bab15e2c-a860-4838-8a24-c07f69769de0", "user": "monitor"} 2025-11-19T19:09:59.846Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-24200", "name": "some-name", "reconcileID": "8de57165-5ac3-4b8e-943f-0d6b6ad02404", "user": "monitor"} - "22efc40bc46d6a1e32ae38c97463028d9da0cacd17c87f6ee2a0ca962767cda0", + "22efc40bc46d6a1e32ae38c97463028d9da0cacd17c87f6ee2a0ca962767cda0", ... // 22 identical fields ... // 2 identical fields ... // 2 identical fields ... // 2 identical fields - "37445234508b80c372ee8f606ad658d43860d550eb9686b73610eb9605fc3826", ... // 3 identical elements ... // 3 identical fields ... // 3 identical fields ... // 3 identical fields ... // 4 identical fields "5", - "5746a4758a36fb798bf250367e30bdb16ddb3c87cd6deb3ed8f3a724a9321867", + "5746a4758a36fb798bf250367e30bdb16ddb3c87cd6deb3ed8f3a724a9321867", ... // 5 identical fields ... // 5 identical fields ... // 5 identical fields + "6c", - "6c50057a39240d85a1b609b6123fdb5844700a7da5d97688c6fe463738d3b583", + "6c50057a39240d85a1b609b6123fdb5844700a7da5d97688c6fe463738d3b583", ... // 6 identical fields ... // 6 identical fields - "746a4758a36fb798bf250367e30bdb16ddb3c87cd6deb3ed8f3a724a9321867", ... // 7 identical fields ... // 8 identical fields - "9bcab9413f9c1312dd1b648fe4efa170b92813633c2e751a62b75ee2ea44c229", + "9bcab9413f9c1312dd1b648fe4efa170b92813633c2e751a62b75ee2ea44c229", ... // 9 identical fields ... 
// 9 identical fields AccessModes: nil, ActiveDeadlineSeconds: nil, Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}}, Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}}, Annotations: map[string]string{ - Annotations: map[string]string{ + Annotations: map[string]string{ + APIVersion: "", - APIVersion: "apps/v1", - APIVersion: "apps/v1", - APIVersion: "v1", Args: {"mysqld"}, Args: {"proxysql", "-f", "-c", "/etc/proxysql/proxysql.cnf", ...}, - Args: []string{"logrotate"}, AutomountServiceAccountToken: nil, + AvailableReplicas: 0, - AvailableReplicas: 2, - AvailableReplicas: 3, AWSElasticBlockStore: nil, AzureFile: nil, Capacity: nil, - CollisionCount: &0, + CollisionCount: nil, Conditions: nil, ConfigMapKeyRef: nil, ConfigMap: &v1.ConfigMapVolumeSource{ ContainerPort: 3306, ContainerPort: 33060, ContainerPort: 33062, ContainerPort: 4444, ContainerPort: 4567, ContainerPort: 4568, ContainerPort: 6032, ContainerPort: 6070, Containers: []v1.Container{ + CreationTimestamp: v1.Time{}, - CreationTimestamp: v1.Time{Time: s"2025-11-19 18:47:33 +0000 UTC"}, + CurrentReplicas: 0, - CurrentReplicas: 2, - CurrentReplicas: 3, + CurrentRevision: "", - CurrentRevision: "some-name-proxysql-5666d84c9c", - CurrentRevision: "some-name-proxysql-59d94c45bc", - CurrentRevision: "some-name-proxysql-777d6df5f8", - CurrentRevision: "some-name-proxysql-79557d69cd", - CurrentRevision: "some-name-proxysql-865574bdb6", - CurrentRevision: "some-name-proxysql-95fcf5777", - CurrentRevision: "some-name-pxc-57cc6986d7", - CurrentRevision: "some-name-pxc-5cfdb55c79", - CurrentRevision: "some-name-pxc-75df5c477c", - CurrentRevision: "some-name-pxc-7897797f9d", DataSource: nil, DataSourceRef: nil, - DefaultMode: &420, - DefaultMode: &420, + DefaultMode: nil, + DefaultMode: nil, DeletionGracePeriodSeconds: nil, DeletionGracePeriodSeconds: nil, DeletionTimestamp: nil, + DeprecatedServiceAccount: "", - DeprecatedServiceAccount: "default", + DNSPolicy: "", - DNSPolicy: "ClusterFirst", EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-proxysql"}, Optional: &true}}}, EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-pxc"}, Optional: &true}}}, EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-log-collector"}, Optional: &true}}}, - EnvFrom: []v1.EnvFromSource{{SecretRef: s"&SecretEnvSource{LocalObjectRefe"...}}, Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "OPERATOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "proxyadmin"}}}, ...}, Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: 
"internal-some-name"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, ...}, Env: []v1.EnvVar{ - Env: []v1.EnvVar{ EphemeralContainers: nil, FailureThreshold: 3, FC: nil, - "feb68e0e2d6ccd0946569222441c5199bf690516002015e16b4d3fd6662061f9", FieldPath: "metadata.name", FieldPath: "metadata.namespace", FieldRef: &v1.ObjectFieldSelector{ - FieldsType: "FieldsV1", - FieldsType: "FieldsV1", - FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`..., - FieldsV1: s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`..., Finalizers: nil, Finalizers: nil, + Generation: 0, - Generation: 1, - Generation: 2, - Generation: 3, - Generation: 4, - Generation: 5, - Generation: 6, - Generation: 7, - Generation: 8, github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 GitRepo: nil, /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:474 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:869 HostAliases: nil, HostIP: "", HostPort: 0, - Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", - Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", ImagePullPolicy: "Always", - ImagePullPolicy: "Always", InitContainers: []v1.Container{ InitialDelaySeconds: 300, ISCSI: nil, Items: nil, Items: nil, "kubectl.kubernetes.io/default-container": "proxysql", "kubectl.kubernetes.io/default-container": "pxc", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Labels: nil, + "last-applied-secret": "37445234508b80c372ee8f606ad658d43860d550eb9686b73610eb9605fc3826", + "last-applied-secret": "feb68e0e2d6ccd0946569222441c5199bf690516002015e16b4d3fd6662061f9", "last-applied-secret": strings.Join({ Lifecycle: nil, LivenessProbe: &v1.Probe{ LocalObjectReference: {Name: "auto-some-name-pxc"}, LocalObjectReference: {Name: "some-name-pxc"}, ManagedFields: nil, + ManagedFields: nil, - ManagedFields: []v1.ManagedFieldsEntry{ - Manager: "kube-controller-manager", - Manager: "percona-xtradb-cluster-operator", MinReadySeconds: 0, [mysql] 2025/11/19 19:06:42 packets.go:58 unexpected EOF [mysql] 2025/11/19 19:08:50 packets.go:58 read tcp 10.171.176.59:39944->10.171.177.56:33062: read: connection reset by peer Name: "auto-config", {Name: "bin", VolumeSource: {EmptyDir: &{}}}, {Name: "CLUSTER_HASH", Value: "2927713"}, Name: "config", Name: "DEFAULT_AUTHENTICATION_PLUGIN", - {Name: "IS_LOGCOLLECTOR", Value: "yes"}, Name: "ist", {Name: "LIVENESS_CHECK_TIMEOUT", Value: "5"}, {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"}, - {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"}, - Name: "logrotate", - Name: "logs", {Name: "MONITOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "monitor"}}}, - {Name: "MONITOR_PASSWORD", 
ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, Name: "mysql", Name: "mysql-admin", Name: "mysql-init-file", {Name: "MYSQL_NOTIFY_SOCKET", Value: "/var/lib/mysql/notify.sock"}, {Name: "MYSQL_STATE_FILE", Value: "/var/lib/mysql/mysql.state"}, Name: "mysql-users-secret-file", Name: "mysqlx", {Name: "OPERATOR_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, Name: "POD_NAME", Name: "POD_NAMESPASE", - {Name: "POD_NAMESPASE", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, - {Name: "POD_NAME", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, Name: "proxyadm", {Name: "READINESS_CHECK_TIMEOUT", Value: "15"}, - {Name: "SERVICE_TYPE", Value: "mysql"}, Namespace: "users-24200", Name: "ssl", Name: "ssl-internal", Name: "sst", Name: "stats", {Name: "tmp", VolumeSource: {EmptyDir: &{}}}, Name: "vault-keyring-secret", Name: "write-set", {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, NFS: nil, NodeName: "", NodeSelector: nil, ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "last-applied-secret": "37445234508b80c372ee8f606ad658d43860d550eb9686b73610eb9605fc3826", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}}, ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}}, ObjectMeta: {Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "pxc", "last-applied-secret": "6c50057a39240d85a1b609b6123fdb5844700a7da5d97688c6fe463738d3b583", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}}, ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, ObjectMeta: {Name: "proxydata", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, ObjectMeta: v1.ObjectMeta{ ObjectMeta: v1.ObjectMeta{ + ObservedGeneration: 0, - ObservedGeneration: 1, - ObservedGeneration: 2, - ObservedGeneration: 3, - ObservedGeneration: 4, - ObservedGeneration: 5, - ObservedGeneration: 6, - ObservedGeneration: 7, - ObservedGeneration: 8, - Operation: "Update", - Operation: "Update", Optional: &false, Optional: &true, Optional: &true, Ordinals: nil, OS: nil, Overhead: 
nil, OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name", UID: "45c9ae4d-c458-4915-8e94-0fc7895c0fad", ...}}, OwnerReferences: nil, "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMjJlZmM0MGJjNDZkNmExZTMyYWUzOGM5NzQ2MzAyOGQ5ZGEwY2FjZDE3Yzg3ZjZlZTJhMGNhOTYyNzY3Y2RhMCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMjJlZmM0MGJjNDZkNmExZTMyYWUzOGM5NzQ2MzAyOGQ5ZGEwY2FjZDE3Yzg3ZjZlZTJhMGNhOTYyNzY3Y2RhMCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMTNlZjI4NjM5OWQwNjAyMDljZDlkNjAyYTdkYjMxOTQ0MjRkMWUyOGY1MWE3MDdkMDkzYTc3OTQ0ODQzYTliYiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMTNlZjI4NjM5OWQwNjAyMDljZDlkNjAyYTdkYjMxOTQ0MjRkMWUyOGY1MWE3MDdkMDkzYTc3OTQ0ODQzYTliYiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMzc0NDUyMzQ1MDhiODBjMzcyZWU4ZjYwNmFkNjU4ZDQzODYwZDU1MGViOTY4NmI3MzYxMGViOTYwNWZjMzgyNiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNmM1MDA1N2EzOTI0MGQ4NWExYjYwOWI2MTIzZmRiNTg0NDcwMGE3ZGE1ZDk3Njg4YzZmZTQ2MzczOGQzYjU4MyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNTc0NmE0NzU4YTM2ZmI3OThiZjI1MDM2N2UzMGJkYjE2ZGRiM2M4N2NkNmRlYjNlZDhmM2E3MjRhOTMyMTg2NyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNTc0NmE0NzU4YTM2ZmI3OThiZjI1MDM2N2UzMGJkYjE2ZGRiM2M4N2NkNmRlYjNlZDhmM2E3MjRhOTMyMTg2NyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOWJjYWI5NDEzZjljMTMxMmRkMWI2NDhmZTRlZmExNzBiOTI4MTM2MzNjMmU3NTFhNjJiNzVlZTJlYTQ0YzIyOSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOWJjYWI5NDEzZjljMTMxMmRkMWI2NDhmZTRlZmExNzBiOTI4MTM2MzNjMmU3NTFhNjJiNzVlZTJlYTQ0YzIyOSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNmM1MDA1N2EzOTI0MGQ4NWExYjYwOWI2MTIzZmRiNTg0NDcwMGE3ZGE1ZDk3Njg4YzZmZTQ2MzczOGQzYjU4MyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNmM1MDA1N2EzOTI0MGQ4NWExYjYwOWI2MTIzZmRiNTg0NDcwMGE3ZGE1ZDk3Njg4YzZmZTQ2MzczOGQzYjU4MyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNmM1MDA1N2EzOTI0MGQ4NWExYjYwOWI2MTIzZmRiNTg0NDcwMGE3ZGE1ZDk3Njg4YzZmZTQ2MzczOGQzYjU4MyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMTU0LTdhNjIzYjEwIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiO
iJsb2dzIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1sb2djb2xsZWN0b3IiLCJlbnZGcm9tIjpbeyJzZWNyZXRSZWYiOnsibmFtZSI6InNvbWUtbmFtZS1sb2ctY29sbGVjdG9yIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IkxPR19EQVRBX0RJUiIsInZhbHVlIjoiL3Zhci9saWIvbXlzcWwifSx7Im5hbWUiOiJQT0RfTkFNRVNQQVNFIiwidmFsdWVGcm9tIjp7ImZpZWxkUmVmIjp7ImZpZWxkUGF0aCI6Im1ldGFkYXRhLm5hbWVzcGFjZSJ9fX0seyJuYW1lIjoiUE9EX05BTUUiLCJ2YWx1ZUZyb20iOnsiZmllbGRSZWYiOnsiZmllbGRQYXRoIjoibWV0YWRhdGEubmFtZSJ9fX1dLCJyZXNvdXJjZXMiOnt9LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJkYXRhZGlyIiwibW91bnRQYXRoIjoiL3Zhci9saWIvbXlzcWwifV0sImltYWdlUHVsbFBvbGljeSI6IkFsd2F5cyJ9LHsibmFtZSI6ImxvZ3JvdGF0ZSIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOm1haW4tbG9nY29sbGVjdG9yIiwiYXJncyI6WyJsb2dyb3RhdGUiXSwiZW52IjpbeyJuYW1lIjoiU0VSVklDRV9UWVBFIiwidmFsdWUiOiJteXNxbCJ9LHsibmFtZSI6Ik1PTklUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJtb25pdG9yIn19fV0sInJlc291cmNlcyI6e30sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImRhdGFkaXIiLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHhjIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1weGM4LjAiLCJjb21tYW5kIjpbIi92YXIvbGliL215c3FsL3B4Yy1lbnRyeXBvaW50LnNoIl0sImFyZ3MiOlsibXlzcWxkIl0sInBvcnRzIjpbeyJuYW1lIjoibXlzcWwiLCJjb250YWluZXJQb3J0IjozMzA2fSx7Im5hbWUiOiJzc3QiLCJjb250YWluZXJQb3J0Ijo0NDQ0fSx7Im5hbWUiOiJ3cml0ZS1zZXQiLCJjb250YWluZXJQb3J0Ijo0NTY3fSx7Im5hbWUiOiJpc3QiLCJjb250YWluZXJQb3J0Ijo0NTY4fSx7Im5hbWUiOiJteXNxbC1hZG1pbiIsImNvbnRhaW5lclBvcnQiOjMzMDYyfSx7Im5h"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNmM1MDA1N2EzOTI0MGQ4NWExYjYwOWI2MTIzZmRiNTg0NDcwMGE3ZGE1ZDk3Njg4YzZmZTQ2MzczOGQzYjU4MyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5
hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMTU0LTdhNjIzYjEwIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXB4YzguMCIsImNvbW1hbmQiOlsiL3Zhci9saWIvbXlzcWwvcHhjLWVudHJ5cG9pbnQuc2giXSwiYXJncyI6WyJteXNxbGQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InNzdCIsImNvbnRhaW5lclBvcnQiOjQ0NDR9LHsibmFtZSI6IndyaXRlLXNldCIsImNvbnRhaW5lclBvcnQiOjQ1Njd9LHsibmFtZSI6ImlzdCIsImNvbnRhaW5lclBvcnQiOjQ1Njh9LHsibmFtZSI6Im15c3FsLWFkbWluIiwiY29udGFpbmVyUG9ydCI6MzMwNjJ9LHsibmFtZSI6Im15c3FseCIsImNvbnRhaW5lclBvcnQiOjMzMDYwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXB4YyIsIm9wdGlvbmFsIjp0cnVlfX1dLCJlbnYiOlt7Im5hbWUiOiJQWENfU0VSVklDRSIsInZhbHVlIjoic29tZS1uYW1lLXB4Yy11bnJlYWR5In0seyJuYW1lIjoiTU9OSVRPUl9IT1NUIiwidmFsdWUiOiIlIn0seyJuYW1lIjoiTVlTUUxfUk9PVF9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6InJvb3QifX19LHsibmFtZSI6IlhUUkFCQUNLVVBfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJ4dHJhYmFja3VwIn19fSx7Im5hbWUiOiJNT05JVE9SX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoibW9uaXRvciJ9fX0seyJuYW1lIjoiQ0xVU1RFUl9IQVNIIiwidmFsdWUiOiIyOTI3NzEzIn0seyJuYW1lIjoiT1BFUkFUT1JfQURNSU5fUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiTElWRU5FU1NfQ0hFQ0tfVElNRU9VVCIsInZhbHVlIjoiNSJ9LHsibmFtZSI6IlJFQURJTkVTU19DSEVDS19USU1FT1VUIiwidmFsdWUiOiIxNSJ9LHsibmFtZSI6IkRFRkFVTFRfQVVUSEVOVElDQVRJT05fUExVR0lOIiwidmFsdWUiOiJjYWNoaW5nX3NoYTJfcGFzc3dvcmQifSx7Im5hbWUiOiJNWVNR"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNTc0NmE0NzU4YTM2ZmI3OThiZjI1MDM2N2UzMGJkYjE2ZGRiM2M4N2NkNmRlYjNlZDhmM2E3MjRhOTMyMTg2NyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNTc0NmE0NzU4YTM2ZmI3OThiZjI1MDM2N2UzMGJkYjE2ZGRiM2M4N2NkNmRlYjNlZDhmM2E3MjRhOTMyMTg2NyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOWJjYWI5NDEzZjljMTMxMmRkMWI2NDhmZTRlZmExNzBiOTI4MTM2MzNjMmU3NTFhNjJiNzVlZTJlYTQ0YzIyOSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZmViNjhlMGUyZDZjY2QwOTQ2NTY5MjIyNDQxYzUxOTliZjY5MDUxNjAwMjAxNWUxNmI0ZDNmZDY2NjIwNjFmOSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZmViNjhlMGUyZDZjY2QwOTQ2NTY5MjIyNDQxYzUxOTliZjY5MDUxNjAwMjAxNWUxNmI0ZDNmZDY2NjIwNjFmOSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNv"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMzc0NDUyMzQ1MDhiODBjMzcyZWU4ZjYwNmFkNjU4ZDQzODYwZDU1MGViOTY4NmI3MzYxMGViOTYwNWZjMzgyNiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMw"..., "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc", + PeriodSeconds: 0, - PeriodSeconds: 10, + PersistentVolumeClaimRetentionPolicy: nil, - PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", + Phase: "", - Phase: "Pending", + PodManagementPolicy: "", - PodManagementPolicy: "OrderedReady", Ports: nil, Ports: []v1.ContainerPort{ PreemptionPolicy: nil, ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}}, + Protocol: "", - Protocol: "TCP", Quobyte: nil, ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...}, + ReadyReplicas: 0, - ReadyReplicas: 2, - ReadyReplicas: 3, + Replicas: 0, Replicas: &2, - Replicas: 2, - Replicas: &2, + Replicas: &2, Replicas: &3, - Replicas: 3, - Replicas: &3, + Replicas: &3, ResizePolicy: nil, ResourceFieldRef: nil, Resources: {}, Resources: {Limits: {s"cpu": {i: {...}, s: "1", Format: "DecimalSI"}, s"memory": {i: {...}, s: "2G", Format: "DecimalSI"}}, Requests: {s"cpu": {i: {...}, s: "100m", Format: "DecimalSI"}, s"memory": {i: {...}, Format: "DecimalSI"}}}, + ResourceVersion: "", - ResourceVersion: 
"1763578086215007001", - ResourceVersion: "1763578265728399016", - ResourceVersion: "1763578395226175001", - ResourceVersion: "1763578428285711001", - ResourceVersion: "1763578452369583001", - ResourceVersion: "1763578608431311016", - ResourceVersion: "1763578642625535001", - ResourceVersion: "1763578726770431001", - ResourceVersion: "1763578769844895001", - ResourceVersion: "1763578907516927016", - ResourceVersion: "1763578941082751001", - ResourceVersion: "1763579043186095016", - ResourceVersion: "1763579224115039016", + RestartPolicy: "", - RestartPolicy: "Always", - RevisionHistoryLimit: &10, + RevisionHistoryLimit: nil, + SchedulerName: "", - SchedulerName: "default-scheduler", SecretName: "internal-some-name", SecretName: "some-name-mysql-init", SecretName: "some-name-ssl", SecretName: "some-name-ssl-internal", SecretName: "some-name-vault", Secret: &v1.SecretVolumeSource{ SecurityContext: nil, Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, SelfLink: "", ServiceAccountName: "default", ServiceName: "some-name-proxysql-unready", ServiceName: "some-name-pxc", SetHostnameAsFQDN: nil, sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1 Spec: v1.PersistentVolumeClaimSpec{ Spec: v1.PodSpec{ Spec: v1.StatefulSetSpec{ StartupProbe: nil, Status: v1.PersistentVolumeClaimStatus{ Status: v1.StatefulSetStatus{ StorageClassName: nil, Subdomain: "", - Subresource: "status", SuccessThreshold: 1, Template: v1.PodTemplateSpec{ TerminationGracePeriodSeconds: &30, TerminationGracePeriodSeconds: &600, TerminationGracePeriodSeconds: nil, + TerminationMessagePath: "", - TerminationMessagePath: "/dev/termination-log", + TerminationMessagePolicy: "", - TerminationMessagePolicy: "File", TimeoutSeconds: 5, - Time: s"2025-11-19 18:47:33 +0000 UTC", - Time: s"2025-11-19 18:48:06 +0000 UTC", - Time: s"2025-11-19 18:51:05 +0000 UTC", - Time: s"2025-11-19 18:52:59 +0000 UTC", - Time: s"2025-11-19 18:53:15 +0000 UTC", - Time: s"2025-11-19 18:53:24 +0000 UTC", - Time: s"2025-11-19 18:53:48 +0000 UTC", - Time: s"2025-11-19 18:54:11 +0000 UTC", - Time: s"2025-11-19 18:54:12 +0000 UTC", - Time: s"2025-11-19 18:54:14 +0000 UTC", - Time: s"2025-11-19 18:56:48 +0000 UTC", - Time: s"2025-11-19 18:56:57 +0000 UTC", - Time: s"2025-11-19 18:57:22 +0000 UTC", - Time: s"2025-11-19 18:58:26 +0000 UTC", - Time: s"2025-11-19 18:58:46 +0000 UTC", - Time: s"2025-11-19 18:59:15 +0000 UTC", - Time: s"2025-11-19 18:59:29 +0000 UTC", - Time: s"2025-11-19 19:01:47 +0000 UTC", - Time: s"2025-11-19 19:01:56 +0000 UTC", - Time: s"2025-11-19 19:02:21 +0000 UTC", - Time: s"2025-11-19 19:03:47 +0000 UTC", - Time: s"2025-11-19 19:04:03 +0000 UTC", - Time: s"2025-11-19 19:04:04 +0000 UTC", - Time: s"2025-11-19 19:07:04 +0000 UTC", Tolerations: nil, - TopologySpreadConstraints: nil, + TopologySpreadConstraints: []v1.TopologySpreadConstraint{}, TypeMeta: {}, TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"}, + 
UID: "", - UID: "acdaa290-9cd9-4407-a6aa-2d320d521219", - UID: "e50567ee-85d6-4a38-8678-69748b6b3020", + UpdatedReplicas: 0, - UpdatedReplicas: 1, - UpdatedReplicas: 2, - UpdatedReplicas: 3, + UpdateRevision: "", - UpdateRevision: "some-name-proxysql-5666d84c9c", - UpdateRevision: "some-name-proxysql-59d94c45bc", - UpdateRevision: "some-name-proxysql-777d6df5f8", - UpdateRevision: "some-name-proxysql-79557d69cd", - UpdateRevision: "some-name-proxysql-865574bdb6", - UpdateRevision: "some-name-proxysql-95fcf5777", - UpdateRevision: "some-name-pxc-57cc6986d7", - UpdateRevision: "some-name-pxc-5cfdb55c79", - UpdateRevision: "some-name-pxc-75df5c477c", - UpdateRevision: "some-name-pxc-7897797f9d", - UpdateRevision: "some-name-pxc-85ddddcf98", UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}}, &v1.StatefulSet{ Value: "", + Value: "caching_sha2_password", ValueFrom: nil, ValueFrom: &v1.EnvVarSource{ - Value: "mysql_native_password", VolumeAttributesClassName: nil, VolumeClaimTemplates: []v1.PersistentVolumeClaim{ VolumeDevices: nil, - VolumeMode: &"Filesystem", + VolumeMode: nil, VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...}, - VolumeMounts: []v1.VolumeMount{{Name: "datadir", MountPath: "/var/lib/mysql"}}, VolumeName: "", VolumeSource: v1.VolumeSource{ Volumes: []v1.Volume{ VsphereVolume: nil, WorkingDir: "", + grep -v NAMESPACE + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-24200 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.hh3r8o0N7d ++ mktemp + local LAST_ERR=/tmp/tmp.ZocDSGQwJn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hh3r8o0N7d perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-24200 namespace + cat /tmp/tmp.ZocDSGQwJn + rm /tmp/tmp.hh3r8o0N7d /tmp/tmp.ZocDSGQwJn + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.KvatTufBQI ++ mktemp + local LAST_ERR=/tmp/tmp.skaIJg0ilj + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.KvatTufBQI No resources found + cat /tmp/tmp.skaIJg0ilj + rm /tmp/tmp.KvatTufBQI /tmp/tmp.skaIJg0ilj + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.Yp8PPtAnVa ++ mktemp + local LAST_ERR=/tmp/tmp.oRxseSmdSh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Yp8PPtAnVa No resources found + cat /tmp/tmp.oRxseSmdSh + rm /tmp/tmp.Yp8PPtAnVa /tmp/tmp.oRxseSmdSh + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.e2cXthRMnK ++ mktemp + local LAST_ERR=/tmp/tmp.mhsQzNMod4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + 
set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.e2cXthRMnK validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.mhsQzNMod4 + rm /tmp/tmp.e2cXthRMnK /tmp/tmp.mhsQzNMod4 + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-24200 + rm -rf /tmp/tmp.JFgyIkaORV + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.Iv30MEAVeb + desc 'test passed' ++ mktemp + set +o xtrace + local LAST_OUT=/tmp/tmp.LXLyWEqN56 ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- ++ mktemp + local LAST_ERR=/tmp/tmp.wUWjc4hmVy + local exit_status=0 + local LAST_ERR=/tmp/tmp.telOFATHkp ++ seq 0 2 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-24200 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator
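
Note: the bulk of the StatefulSet diff earlier in this section is the percona.com/last-config-hash annotation, which is a base64-encoded snapshot of the spec the operator last applied (the blobs decode to JSON starting with {"replicas":3,"selector":...}). A minimal sketch for inspecting it on a live cluster, using the namespace and object names from this run and assuming the annotation sits in the StatefulSet's metadata (check spec.template.metadata.annotations if it is not there) and that jq is installed:

    # Sketch only: decode the operator's last-config-hash annotation to see
    # the StatefulSet spec snapshot it encodes (annotation location assumed).
    kubectl -n users-24200 get sts some-name-pxc -o json \
      | jq -r '.metadata.annotations["percona.com/last-config-hash"]' \
      | base64 -d | jq .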
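
Note: most of the trace volume comes from the test suite's retry wrapper around kubectl: each call allocates two mktemp files for stdout/stderr, retries up to three times (seq 0 2) with errexit temporarily disabled, then cats and removes the temp files. A rough reconstruction of that wrapper, inferred from the trace alone and not taken from the actual e2e-tests helper; the back-off sleep between attempts is an assumption:

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status i
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        exit_status=0
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 ]; then
                sleep $((i + 1))   # back-off between attempts (assumed, not visible in the trace)
                continue
            fi
            break
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR"
        rm -f "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }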