Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/logs/users-8-0.log Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 + create_infra users-29404 + local ns=users-29404 + '[' -n pxc-operator ']' + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl get pxc --all-namespaces -o wide + kubectl patch pxc -n users-31409 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.v7N3Umr1VU ++ mktemp + local LAST_ERR=/tmp/tmp.OBwKqKiGGO + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.v7N3Umr1VU perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-31409 namespace + cat /tmp/tmp.OBwKqKiGGO + rm /tmp/tmp.v7N3Umr1VU /tmp/tmp.OBwKqKiGGO + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.96qyW42VfT ++ mktemp + local LAST_ERR=/tmp/tmp.XoBjyK95UY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.96qyW42VfT No resources found + cat /tmp/tmp.XoBjyK95UY + rm /tmp/tmp.96qyW42VfT /tmp/tmp.XoBjyK95UY + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.tIuRZGTKSo ++ mktemp + local LAST_ERR=/tmp/tmp.HEERNcratm + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.tIuRZGTKSo No resources found + cat /tmp/tmp.HEERNcratm + rm /tmp/tmp.tIuRZGTKSo /tmp/tmp.HEERNcratm + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were 
provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp + xargs kubectl delete ns + local LAST_OUT=/tmp/tmp.0sAciyiSLY + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.d6wSaMeqXM + local exit_status=0 + local LAST_OUT=/tmp/tmp.7JpL3EVPsp ++ seq 0 2 ++ mktemp + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + local LAST_ERR=/tmp/tmp.zOKqg4tVAm + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7JpL3EVPsp + cat /tmp/tmp.zOKqg4tVAm + rm /tmp/tmp.7JpL3EVPsp /tmp/tmp.zOKqg4tVAm + return 0 namespace "users-31409" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0sAciyiSLY namespace "pxc-operator" deleted + cat /tmp/tmp.d6wSaMeqXM + rm /tmp/tmp.0sAciyiSLY /tmp/tmp.d6wSaMeqXM + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.2vkgLlInlR ++ mktemp + local LAST_ERR=/tmp/tmp.OsC7XDaaGS + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2vkgLlInlR namespace/pxc-operator created + cat /tmp/tmp.OsC7XDaaGS + rm /tmp/tmp.2vkgLlInlR /tmp/tmp.OsC7XDaaGS + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.kf4Ol8OTqj +++ mktemp ++ local LAST_ERR=/tmp/tmp.7OrTo3SOvY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kf4Ol8OTqj ++ cat /tmp/tmp.7OrTo3SOvY ++ rm /tmp/tmp.kf4Ol8OTqj /tmp/tmp.7OrTo3SOvY ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2213-6c08ea71-7-cluster8 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.nnXJQ27yKr ++ mktemp + local LAST_ERR=/tmp/tmp.9waB3aFN5P + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2213-6c08ea71-7-cluster8 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.nnXJQ27yKr Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2213-6c08ea71-7-cluster8" modified. 
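Every step above goes through the kubectl_bin helper rather than calling kubectl directly. A minimal sketch of that wrapper, reconstructed from the trace (the temp-file capture, the three attempts via seq 0 2, and the exit-status handling are visible in the log; the exact back-off between retries is an assumption):

kubectl_bin() {
    # run kubectl with retries, capturing stdout/stderr of the last attempt
    local LAST_OUT LAST_ERR exit_status=0
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do                      # up to three attempts
        set +e
        kubectl "$@" 1>"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        [ "$exit_status" != 0 ] || break         # success: stop retrying
        sleep "$i"                               # assumed back-off; the trace shows 'sleep 0'
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR"
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}

This is why failed commands in the log are re-run up to three times, and why the captured output (for example 'namespace "pxc-operator" deleted') only surfaces after the retry loop finishes.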
+ cat /tmp/tmp.9waB3aFN5P + rm /tmp/tmp.nnXJQ27yKr /tmp/tmp.9waB3aFN5P + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.BvX5EBt6pg ++ mktemp + local LAST_ERR=/tmp/tmp.QEKZOFG810 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BvX5EBt6pg customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.QEKZOFG810 + rm /tmp/tmp.BvX5EBt6pg /tmp/tmp.QEKZOFG810 + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/deploy/cw-rbac.yaml + kubectl_bin apply -f - + sed -e 's^namespace: .*^namespace: pxc-operator^' ++ mktemp + local LAST_OUT=/tmp/tmp.SNvXWqh2x0 ++ mktemp + local LAST_ERR=/tmp/tmp.dd1Jh3pAsp + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SNvXWqh2x0 clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.dd1Jh3pAsp + rm /tmp/tmp.SNvXWqh2x0 /tmp/tmp.dd1Jh3pAsp + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/deploy/cw-operator.yaml + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2213-6c08ea71^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + kubectl_bin apply -f - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - ++ mktemp + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + local LAST_OUT=/tmp/tmp.OkH1Z0rIQT ++ mktemp + local LAST_ERR=/tmp/tmp.7sZ68YJIo5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.OkH1Z0rIQT deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.7sZ68YJIo5 + rm /tmp/tmp.OkH1Z0rIQT /tmp/tmp.7sZ68YJIo5 + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.DdmApAU5cY ++ mktemp + local LAST_ERR=/tmp/tmp.5p1ee4NJeB + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.DdmApAU5cY pod/percona-xtradb-cluster-operator-f5b849cf6-rz4rd condition met + cat /tmp/tmp.5p1ee4NJeB + rm /tmp/tmp.DdmApAU5cY /tmp/tmp.5p1ee4NJeB + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.k1JxfD5l1M +++ mktemp ++ local LAST_ERR=/tmp/tmp.ftR3mxczoJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.k1JxfD5l1M ++ cat /tmp/tmp.ftR3mxczoJ ++ rm /tmp/tmp.k1JxfD5l1M /tmp/tmp.ftR3mxczoJ ++ return 0 + wait_pod percona-xtradb-cluster-operator-f5b849cf6-rz4rd 480 pxc-operator + local pod=percona-xtradb-cluster-operator-f5b849cf6-rz4rd + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-f5b849cf6-rz4rd ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-f5b849cf6-rz4rd condition met waiting for pod/percona-xtradb-cluster-operator-f5b849cf6-rz4rd to become Ready.Ok + sleep 3 + create_namespace users-29404 + local namespace=users-29404 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + '[' -n '' ']' + desc 'cleaned up old 
namespaces users-29404' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-29404 ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + kubectl_bin delete namespace users-29404 + kubectl_bin get ns ++ mktemp + local LAST_OUT=/tmp/tmp.q0XcnhUaqy ++ mktemp + local LAST_ERR=/tmp/tmp.uSOVqYbIvX + local exit_status=0 ++ seq 0 2 ++ mktemp + for i in '$(seq 0 2)' + set +e + kubectl get ns + local LAST_OUT=/tmp/tmp.aI5A6c9NGT ++ mktemp + local LAST_ERR=/tmp/tmp.1FvsfdTm0z + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-29404 + awk '{print$1}' + xargs kubectl delete ns + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-29404 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.q0XcnhUaqy + cat /tmp/tmp.uSOVqYbIvX + rm /tmp/tmp.q0XcnhUaqy /tmp/tmp.uSOVqYbIvX + return 0 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-29404 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.aI5A6c9NGT + cat /tmp/tmp.1FvsfdTm0z Error from server (NotFound): namespaces "users-29404" not found + rm /tmp/tmp.aI5A6c9NGT /tmp/tmp.1FvsfdTm0z + return 1 + : + wait_for_delete namespace/users-29404 + local res=namespace/users-29404 + echo -n 'waiting for namespace/users-29404 to be deleted' waiting for namespace/users-29404 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "users-29404" not found + desc 'create namespace users-29404' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-29404 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-29404 ++ mktemp + local LAST_OUT=/tmp/tmp.EOzewim0WB ++ mktemp + local LAST_ERR=/tmp/tmp.jVq7v33ta4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-29404 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.EOzewim0WB namespace/users-29404 created + cat /tmp/tmp.jVq7v33ta4 + rm /tmp/tmp.EOzewim0WB /tmp/tmp.jVq7v33ta4 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.eWQHhWkfM5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.QbltWKcOSU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eWQHhWkfM5 ++ cat /tmp/tmp.QbltWKcOSU ++ rm /tmp/tmp.eWQHhWkfM5 /tmp/tmp.QbltWKcOSU ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2213-6c08ea71-7-cluster8 --namespace=users-29404 ++ mktemp + local LAST_OUT=/tmp/tmp.0zRsbQ5wvK ++ mktemp + local LAST_ERR=/tmp/tmp.M75NbUeqx4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2213-6c08ea71-7-cluster8 --namespace=users-29404 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0zRsbQ5wvK Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2213-6c08ea71-7-cluster8" modified. 
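create_namespace, as traced here for users-29404 and earlier for pxc-operator, tolerates a missing namespace: it sweeps leftover test namespaces, deletes the target one, waits until it is gone, and recreates it before the kubectl context is switched. A rough sketch of that flow inferred from the trace (the system-namespace filter and helper names appear above; the polling inside wait_for_delete is an assumption, since the log only shows the final NotFound check):

create_namespace() {
    local namespace=$1
    destroy_chaos_mesh                                   # drop leftover chaos-mesh webhooks, CRDs and roles
    # remove non-system namespaces left behind by earlier test runs
    kubectl_bin get ns \
        | egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' \
        | awk '{print $1}' \
        | xargs kubectl delete ns || :
    kubectl_bin delete namespace "$namespace" || :       # NotFound is tolerated, as seen above
    wait_for_delete "namespace/$namespace"
    kubectl_bin create namespace "$namespace"
}

wait_for_delete() {
    local res=$1
    echo -n "waiting for $res to be deleted"
    # assumed polling loop
    while kubectl get "$res" >/dev/null 2>&1; do
        echo -n .
        sleep 1
    done
}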
+ cat /tmp/tmp.M75NbUeqx4 + rm /tmp/tmp.0zRsbQ5wvK /tmp/tmp.M75NbUeqx4 + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.JWSgvoVGwF ++ mktemp + local LAST_ERR=/tmp/tmp.tWbRJ9ByKW + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.JWSgvoVGwF secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.tWbRJ9ByKW + rm /tmp/tmp.JWSgvoVGwF /tmp/tmp.tWbRJ9ByKW + return 0 + desc 'create PXC cluster with 1-password secret' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster with 1-password secret ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/conf/some-name.yml '' '' /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/conf/secrets_one_pass.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/conf/secrets_one_pass.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/conf/secrets_one_pass.yml ++ mktemp + local LAST_OUT=/tmp/tmp.TQtWtTwwTK ++ mktemp + local LAST_ERR=/tmp/tmp.rHbZDzHOLa + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/conf/secrets_one_pass.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TQtWtTwwTK secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.rHbZDzHOLa + rm /tmp/tmp.TQtWtTwwTK /tmp/tmp.rHbZDzHOLa + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/conf/client.yml + kubectl_bin apply -f - + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' ++ mktemp + local LAST_OUT=/tmp/tmp.q9APEbfte8 + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 
's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/conf/client.yml ++ mktemp + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-29404~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_ERR=/tmp/tmp.QS3iM9fJlG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2213-6c08ea71#' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.q9APEbfte8 deployment.apps/pxc-client created + cat /tmp/tmp.QS3iM9fJlG + rm /tmp/tmp.q9APEbfte8 /tmp/tmp.QS3iM9fJlG + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/conf/some-name.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-29404~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2213-6c08ea71#' ++ mktemp + local LAST_OUT=/tmp/tmp.0QORXvYYWw ++ mktemp + local LAST_ERR=/tmp/tmp.Z8Qsk6OIp6 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0QORXvYYWw perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.Z8Qsk6OIp6 + rm /tmp/tmp.0QORXvYYWw /tmp/tmp.Z8Qsk6OIp6 + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.TototCw8tK ++++ mktemp +++ local LAST_ERR=/tmp/tmp.iG2y0ALY7Z +++ 
local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.TototCw8tK +++ cat /tmp/tmp.iG2y0ALY7Z +++ rm /tmp/tmp.TototCw8tK /tmp/tmp.iG2y0ALY7Z +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.Kec5Tinh34 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.4WYLiNu9ZR +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.Kec5Tinh34 +++ cat /tmp/tmp.4WYLiNu9ZR +++ rm /tmp/tmp.Kec5Tinh34 /tmp/tmp.4WYLiNu9ZR +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-29404 ++ mktemp + local LAST_OUT=/tmp/tmp.BkWGb7FGJX ++ mktemp + local LAST_ERR=/tmp/tmp.zRSZ1ICoCP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-29404 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-29404 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-29404 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.BkWGb7FGJX + cat /tmp/tmp.zRSZ1ICoCP error: no matching resources found + rm /tmp/tmp.BkWGb7FGJX /tmp/tmp.zRSZ1ICoCP + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local 
container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.XRA4CSY7Hb +++ mktemp ++ local LAST_ERR=/tmp/tmp.c7CDzsNVIx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XRA4CSY7Hb ++ cat /tmp/tmp.c7CDzsNVIx ++ rm /tmp/tmp.XRA4CSY7Hb /tmp/tmp.c7CDzsNVIx ++ return 0 + local 'root_pass=)2vAr3I03)+ejBe7l+C' + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\'')2vAr3I03)+ejBe7l+C'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -p'\'')2vAr3I03)+ejBe7l+C'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Hip57y9eLj +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ckcyap3AW5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Hip57y9eLj ++ cat /tmp/tmp.Ckcyap3AW5 ++ rm /tmp/tmp.Hip57y9eLj /tmp/tmp.Ckcyap3AW5 ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\'')2vAr3I03)+ejBe7l+C'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -p'\'')2vAr3I03)+ejBe7l+C'\'' -P3306' ++ get_client_pod ++ 
kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UI0g5e17rP +++ mktemp ++ local LAST_ERR=/tmp/tmp.p4ZNcUDVRv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UI0g5e17rP ++ cat /tmp/tmp.p4ZNcUDVRv ++ rm /tmp/tmp.UI0g5e17rP /tmp/tmp.p4ZNcUDVRv ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\'')2vAr3I03)+ejBe7l+C'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\'')2vAr3I03)+ejBe7l+C'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\'')2vAr3I03)+ejBe7l+C'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\'')2vAr3I03)+ejBe7l+C'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FSHoJ8Pl42 +++ mktemp ++ local LAST_ERR=/tmp/tmp.jyad9AoO69 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FSHoJ8Pl42 ++ cat /tmp/tmp.jyad9AoO69 ++ rm /tmp/tmp.FSHoJ8Pl42 /tmp/tmp.jyad9AoO69 ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.sCrDh3TiZm/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-1.sql /tmp/tmp.sCrDh3TiZm/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\'')2vAr3I03)+ejBe7l+C'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\'')2vAr3I03)+ejBe7l+C'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\'')2vAr3I03)+ejBe7l+C'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\'')2vAr3I03)+ejBe7l+C'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.G3RuQJiHLf +++ mktemp ++ local LAST_ERR=/tmp/tmp.o1KBdClFK5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.G3RuQJiHLf ++ cat /tmp/tmp.o1KBdClFK5 ++ rm /tmp/tmp.G3RuQJiHLf /tmp/tmp.o1KBdClFK5 ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.sCrDh3TiZm/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-1.sql /tmp/tmp.sCrDh3TiZm/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\'')2vAr3I03)+ejBe7l+C'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\'')2vAr3I03)+ejBe7l+C'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\'')2vAr3I03)+ejBe7l+C'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\'')2vAr3I03)+ejBe7l+C'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2C2n4vD1AW +++ mktemp ++ local LAST_ERR=/tmp/tmp.Liq3TLPmC0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2C2n4vD1AW ++ cat /tmp/tmp.Liq3TLPmC0 ++ rm /tmp/tmp.2C2n4vD1AW /tmp/tmp.Liq3TLPmC0 ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.sCrDh3TiZm/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-1.sql /tmp/tmp.sCrDh3TiZm/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kgH8UXMi9Q +++ mktemp ++ local LAST_ERR=/tmp/tmp.hJnT9CS2If ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kgH8UXMi9Q ++ cat /tmp/tmp.hJnT9CS2If Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.kgH8UXMi9Q /tmp/tmp.hJnT9CS2If ++ return 0 + '[' '' ']' + desc 'test missing passwords were created and present in internal secrets' + set +o xtrace ----------------------------------------------------------------------------------- test missing passwords were created and present in internal secrets ----------------------------------------------------------------------------------- + empty_pwds=() + wrong_pwds=() + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking root' Checking root ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.u6w4LyOU57 +++ mktemp ++ local LAST_ERR=/tmp/tmp.dYkddhruMo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.u6w4LyOU57 ++ cat /tmp/tmp.dYkddhruMo ++ rm /tmp/tmp.u6w4LyOU57 /tmp/tmp.dYkddhruMo ++ return 0 + secret_pass=')2vAr3I03)+ejBe7l+C' ++ getSecretData internal-some-name root ++ local secretName=internal-some-name ++ local dataKey=root ++ kubectl_bin get secrets/internal-some-name '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.PWyvHJz45i +++ mktemp ++ local LAST_ERR=/tmp/tmp.smYxxmU5j8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PWyvHJz45i ++ cat /tmp/tmp.smYxxmU5j8 ++ rm /tmp/tmp.PWyvHJz45i /tmp/tmp.smYxxmU5j8 ++ return 0 + int_secret_pass=')2vAr3I03)+ejBe7l+C' + [[ -z )2vAr3I03)+ejBe7l+C ]] + [[ )2vAr3I03)+ejBe7l+C != \)\2\v\A\r\3\I\0\3\)\+\e\j\B\e\7\l\+\C ]] + [[ root != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ root ]] + [[ '' =~ root ]] + echo 'Running compare for root' Running compare for root + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\'')2vAr3I03)+ejBe7l+C'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\'')2vAr3I03)+ejBe7l+C'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql ]] + 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\'')2vAr3I03)+ejBe7l+C'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\'')2vAr3I03)+ejBe7l+C'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5kxQl6EuF2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.dZsRLrlNxl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5kxQl6EuF2 ++ cat /tmp/tmp.dZsRLrlNxl ++ rm /tmp/tmp.5kxQl6EuF2 /tmp/tmp.dZsRLrlNxl ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.sCrDh3TiZm/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.sCrDh3TiZm/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking xtrabackup' Checking xtrabackup ++ getSecretData my-cluster-secrets xtrabackup ++ local secretName=my-cluster-secrets ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.rms2Ka98UT +++ mktemp ++ local LAST_ERR=/tmp/tmp.oEBjLBWBH7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rms2Ka98UT ++ cat /tmp/tmp.oEBjLBWBH7 ++ rm /tmp/tmp.rms2Ka98UT /tmp/tmp.oEBjLBWBH7 ++ return 0 + secret_pass='n!~m)_%sBLb#lqX@gmy' ++ getSecretData internal-some-name xtrabackup ++ local secretName=internal-some-name ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.Qy04qcLOjy +++ mktemp ++ local LAST_ERR=/tmp/tmp.xrDa87j6eT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Qy04qcLOjy ++ cat /tmp/tmp.xrDa87j6eT ++ rm /tmp/tmp.Qy04qcLOjy /tmp/tmp.xrDa87j6eT ++ return 0 + int_secret_pass='n!~m)_%sBLb#lqX@gmy' + [[ -z n!~m)_%sBLb#lqX@gmy ]] + [[ n!~m)_%sBLb#lqX@gmy != \n\!\~\m\)\_\%\s\B\L\b\#\l\q\X\@\g\m\y ]] + [[ xtrabackup != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ xtrabackup ]] + [[ '' =~ xtrabackup ]] + echo 'Running compare for xtrabackup' Running compare for xtrabackup + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''n!~m)_%sBLb#lqX@gmy'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''n!~m)_%sBLb#lqX@gmy'\''' + local postfix= + local 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''n!~m)_%sBLb#lqX@gmy'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''n!~m)_%sBLb#lqX@gmy'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.H77FyBiVOC +++ mktemp ++ local LAST_ERR=/tmp/tmp.Y2Hq49AuFi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.H77FyBiVOC ++ cat /tmp/tmp.Y2Hq49AuFi ++ rm /tmp/tmp.H77FyBiVOC /tmp/tmp.Y2Hq49AuFi ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.sCrDh3TiZm/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.sCrDh3TiZm/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking monitor' Checking monitor ++ getSecretData my-cluster-secrets monitor ++ local secretName=my-cluster-secrets ++ local dataKey=monitor ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.MEX14SF3XG +++ mktemp ++ local LAST_ERR=/tmp/tmp.3wpBZpw3O6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MEX14SF3XG ++ cat /tmp/tmp.3wpBZpw3O6 ++ rm /tmp/tmp.MEX14SF3XG /tmp/tmp.3wpBZpw3O6 ++ return 0 + secret_pass=monitor_password ++ getSecretData internal-some-name monitor ++ local secretName=internal-some-name ++ local dataKey=monitor ++ kubectl_bin get secrets/internal-some-name '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.lYLHTdSXpf +++ mktemp ++ local LAST_ERR=/tmp/tmp.l8Q1EtUGOn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lYLHTdSXpf ++ cat /tmp/tmp.l8Q1EtUGOn ++ rm /tmp/tmp.lYLHTdSXpf /tmp/tmp.l8Q1EtUGOn ++ return 0 + int_secret_pass=monitor_password + [[ -z monitor_password ]] + [[ monitor_password != \m\o\n\i\t\o\r\_\p\a\s\s\w\o\r\d ]] + [[ monitor != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ monitor ]] + [[ '' =~ monitor ]] + echo 'Running compare for monitor' Running compare for monitor + compare_mysql_cmd select-4 'SHOW TABLES;' '-h 
some-name-proxysql -umonitor -p'\''monitor_password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BhKEdRRV9R +++ mktemp ++ local LAST_ERR=/tmp/tmp.uS8BggJQPp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BhKEdRRV9R ++ cat /tmp/tmp.uS8BggJQPp ++ rm /tmp/tmp.BhKEdRRV9R /tmp/tmp.uS8BggJQPp ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.sCrDh3TiZm/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.sCrDh3TiZm/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking proxyadmin' Checking proxyadmin ++ getSecretData my-cluster-secrets proxyadmin ++ local secretName=my-cluster-secrets ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.19BP1Stubv +++ mktemp ++ local LAST_ERR=/tmp/tmp.K2vTo3cC0s ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.19BP1Stubv ++ cat /tmp/tmp.K2vTo3cC0s ++ rm /tmp/tmp.19BP1Stubv /tmp/tmp.K2vTo3cC0s ++ return 0 + secret_pass='giT$bm1FwS.6+]c8,' ++ getSecretData internal-some-name proxyadmin ++ local secretName=internal-some-name ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.OdTCxA5UGm +++ mktemp ++ local LAST_ERR=/tmp/tmp.3s89KISsdP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OdTCxA5UGm ++ cat /tmp/tmp.3s89KISsdP ++ rm /tmp/tmp.OdTCxA5UGm /tmp/tmp.3s89KISsdP ++ return 0 + int_secret_pass='giT$bm1FwS.6+]c8,' + [[ -z giT$bm1FwS.6+]c8, ]] + [[ giT$bm1FwS.6+]c8, != \g\i\T\$\b\m\1\F\w\S\.\6\+\]\c\8\, ]] + [[ proxyadmin != \p\r\o\x\y\a\d\m\i\n ]] + [[ proxyadmin == \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ proxyadmin ]] + [[ '' =~ proxyadmin ]] + echo 'Running compare for proxyadmin' Running compare for proxyadmin + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''giT$bm1FwS.6+]c8,'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''giT$bm1FwS.6+]c8,'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''giT$bm1FwS.6+]c8,'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''giT$bm1FwS.6+]c8,'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.sCrDh3TiZm/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-2.sql /tmp/tmp.sCrDh3TiZm/select-2.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking operator' Checking operator ++ getSecretData my-cluster-secrets operator ++ local secretName=my-cluster-secrets ++ local dataKey=operator ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.bbfK5yXzNB +++ mktemp ++ local LAST_ERR=/tmp/tmp.iEb4U48vRR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bbfK5yXzNB ++ cat /tmp/tmp.iEb4U48vRR ++ rm /tmp/tmp.bbfK5yXzNB /tmp/tmp.iEb4U48vRR ++ return 0 + secret_pass=',*F$i8bn514MW_Crj{D' ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.toCuXbjwBQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.XTFGaITV5T ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.toCuXbjwBQ ++ cat /tmp/tmp.XTFGaITV5T ++ rm /tmp/tmp.toCuXbjwBQ /tmp/tmp.XTFGaITV5T ++ return 0 + int_secret_pass=',*F$i8bn514MW_Crj{D' + [[ -z ,*F$i8bn514MW_Crj{D ]] + [[ ,*F$i8bn514MW_Crj{D != \,\*\F\$\i\8\b\n\5\1\4\M\W\_\C\r\j\{\D ]] + [[ operator != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ operator ]] + [[ '' =~ operator ]] + echo 'Running compare for operator' Running compare for operator + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\'',*F$i8bn514MW_Crj{D'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\'',*F$i8bn514MW_Crj{D'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\'',*F$i8bn514MW_Crj{D'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\'',*F$i8bn514MW_Crj{D'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hL1qnrp8rb +++ mktemp ++ local LAST_ERR=/tmp/tmp.wMxIWTjolG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hL1qnrp8rb ++ cat /tmp/tmp.wMxIWTjolG ++ rm /tmp/tmp.hL1qnrp8rb /tmp/tmp.wMxIWTjolG ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep 
'^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.sCrDh3TiZm/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.sCrDh3TiZm/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking replication' Checking replication ++ getSecretData my-cluster-secrets replication ++ local secretName=my-cluster-secrets ++ local dataKey=replication ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.qzv24y6Cr7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.DX9MQ07HDU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qzv24y6Cr7 ++ cat /tmp/tmp.DX9MQ07HDU ++ rm /tmp/tmp.qzv24y6Cr7 /tmp/tmp.DX9MQ07HDU ++ return 0 + secret_pass='BhI{MX?9cynb*&XKTW9' ++ getSecretData internal-some-name replication ++ local secretName=internal-some-name ++ local dataKey=replication ++ kubectl_bin get secrets/internal-some-name '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.MYkQCy8iqQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.W0xIXSmL8w ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MYkQCy8iqQ ++ cat /tmp/tmp.W0xIXSmL8w ++ rm /tmp/tmp.MYkQCy8iqQ /tmp/tmp.W0xIXSmL8w ++ return 0 + int_secret_pass='BhI{MX?9cynb*&XKTW9' + [[ -z BhI{MX?9cynb*&XKTW9 ]] + [[ BhI{MX?9cynb*&XKTW9 != \B\h\I\{\M\X\?\9\c\y\n\b\*\&\X\K\T\W\9 ]] + [[ replication != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ replication ]] + [[ '' =~ replication ]] + echo 'Running compare for replication' Running compare for replication + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''BhI{MX?9cynb*&XKTW9'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''BhI{MX?9cynb*&XKTW9'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''BhI{MX?9cynb*&XKTW9'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''BhI{MX?9cynb*&XKTW9'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KouyWUoWdx +++ mktemp ++ local LAST_ERR=/tmp/tmp.j3N6F3TIIM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KouyWUoWdx ++ cat /tmp/tmp.j3N6F3TIIM 
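Note on the checks traced here: every system user (root, xtrabackup, monitor, proxyadmin, operator, replication) is verified with the same pattern. The password is read from the user-facing my-cluster-secrets Secret and from the operator-managed internal-some-name Secret, the two values must match, and the test then logs in through some-name-proxysql with that password and diffs SHOW TABLES against the expected select-4 file. A minimal sketch of the Secret-reading step, reusing the Secret names and template seen in the trace and assuming kubectl already points at the test namespace:

    # read the operator user's password from both Secrets and compare them
    pass=$(kubectl get secret my-cluster-secrets --template='{{.data.operator}}' | base64 --decode)
    int_pass=$(kubectl get secret internal-some-name --template='{{.data.operator}}' | base64 --decode)
    [ "$pass" = "$int_pass" ] || echo 'password mismatch between public and internal Secret'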
++ rm /tmp/tmp.KouyWUoWdx /tmp/tmp.j3N6F3TIIM ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.sCrDh3TiZm/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.sCrDh3TiZm/select-4.sql + [[ -n '' ]] + [[ -n '' ]] + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.TR5HItaTDm ++ mktemp + local LAST_ERR=/tmp/tmp.NqjSxgk4Px + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TR5HItaTDm secret/my-cluster-secrets patched + cat /tmp/tmp.NqjSxgk4Px + rm /tmp/tmp.TR5HItaTDm /tmp/tmp.NqjSxgk4Px + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UegXorAi07 +++ mktemp ++ local LAST_ERR=/tmp/tmp.15AO1Afzxe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UegXorAi07 ++ cat /tmp/tmp.15AO1Afzxe ++ rm /tmp/tmp.UegXorAi07 /tmp/tmp.15AO1Afzxe ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + 
set +o xtrace + '[' '!' -s /tmp/tmp.sCrDh3TiZm/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.sCrDh3TiZm/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.WxmOTyAMXi ++ mktemp + local LAST_ERR=/tmp/tmp.5gd9yqPeA4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.WxmOTyAMXi perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.5gd9yqPeA4 + rm /tmp/tmp.WxmOTyAMXi /tmp/tmp.5gd9yqPeA4 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZfMFMmgu9Q +++ mktemp ++ local LAST_ERR=/tmp/tmp.Z9C9KNJyVT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZfMFMmgu9Q ++ cat /tmp/tmp.Z9C9KNJyVT ++ rm /tmp/tmp.ZfMFMmgu9Q /tmp/tmp.Z9C9KNJyVT ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fSTr2EsEEm +++ mktemp ++ local LAST_ERR=/tmp/tmp.9fGxIV0aiG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fSTr2EsEEm ++ cat /tmp/tmp.9fGxIV0aiG ++ rm /tmp/tmp.fSTr2EsEEm /tmp/tmp.9fGxIV0aiG ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.pZfHRgnDE8 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.rW8gjxvFfa +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.pZfHRgnDE8 +++++ cat /tmp/tmp.rW8gjxvFfa +++++ rm /tmp/tmp.pZfHRgnDE8 /tmp/tmp.rW8gjxvFfa +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.yUCD8C1kpe ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Bx39kMcm7h +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat 
/tmp/tmp.yUCD8C1kpe +++++ cat /tmp/tmp.Bx39kMcm7h +++++ rm /tmp/tmp.yUCD8C1kpe /tmp/tmp.Bx39kMcm7h +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pUIglrJqVf +++ mktemp ++ local LAST_ERR=/tmp/tmp.s9oEubRrlC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pUIglrJqVf ++ cat /tmp/tmp.s9oEubRrlC ++ rm /tmp/tmp.pUIglrJqVf /tmp/tmp.s9oEubRrlC ++ return 0 + [[ 3 == \3 ]] + echo + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.K97VO9mWma ++ mktemp + local LAST_ERR=/tmp/tmp.d5xAM1zQTB + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.K97VO9mWma secret/my-cluster-secrets patched + cat /tmp/tmp.d5xAM1zQTB + rm /tmp/tmp.K97VO9mWma /tmp/tmp.d5xAM1zQTB + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7o2OkDdzQI +++ mktemp ++ local LAST_ERR=/tmp/tmp.KMh8YcrYFi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7o2OkDdzQI ++ cat /tmp/tmp.KMh8YcrYFi ++ rm /tmp/tmp.7o2OkDdzQI /tmp/tmp.KMh8YcrYFi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hV6RitLz81 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Yc4go9GvdP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hV6RitLz81 ++ cat /tmp/tmp.Yc4go9GvdP ++ rm /tmp/tmp.hV6RitLz81 /tmp/tmp.Yc4go9GvdP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FXOHWgV1P2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.acjjh1XwAX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FXOHWgV1P2 ++ cat /tmp/tmp.acjjh1XwAX ++ rm /tmp/tmp.FXOHWgV1P2 /tmp/tmp.acjjh1XwAX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
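Context for the proxyadmin step: ProxySQL was first scaled to three pods, the proxyadmin key in my-cluster-secrets was patched to the base64 of test-password, and once the cluster reports ready the test logs into the ProxySQL admin interface (port 6032) on each proxysql pod locally and compares SHOW TABLES with the expected select-2 file. Roughly the same check done by hand, assuming a mysql client is available inside the proxysql container:

    # verify the new proxyadmin password against the ProxySQL admin port on one pod
    kubectl exec some-name-proxysql-0 -c proxysql -- \
        mysql -h127.0.0.1 -P6032 -uproxyadmin -p'test-password' -e 'SHOW TABLES;'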
.+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mnZkW8ud7v +++ mktemp ++ local LAST_ERR=/tmp/tmp.aYxYyfCXny ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mnZkW8ud7v ++ cat /tmp/tmp.aYxYyfCXny ++ rm /tmp/tmp.mnZkW8ud7v /tmp/tmp.aYxYyfCXny ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2D1mzZw5fA +++ mktemp ++ local LAST_ERR=/tmp/tmp.qhOQwWGspo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2D1mzZw5fA ++ cat /tmp/tmp.qhOQwWGspo ++ rm /tmp/tmp.2D1mzZw5fA /tmp/tmp.qhOQwWGspo ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6hNNyazuqv +++ mktemp ++ local LAST_ERR=/tmp/tmp.eVocIXyUfd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6hNNyazuqv ++ cat /tmp/tmp.eVocIXyUfd ++ rm /tmp/tmp.6hNNyazuqv /tmp/tmp.eVocIXyUfd ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.0Fi5GRCYrA ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.GNGw9qwHFW +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.0Fi5GRCYrA +++++ cat /tmp/tmp.GNGw9qwHFW +++++ rm /tmp/tmp.0Fi5GRCYrA /tmp/tmp.GNGw9qwHFW +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.xLzrwe2bH9 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.e21xDvH2j6 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.xLzrwe2bH9 +++++ cat /tmp/tmp.e21xDvH2j6 +++++ rm /tmp/tmp.xLzrwe2bH9 /tmp/tmp.e21xDvH2j6 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kPl7Fdxhhl +++ mktemp ++ local LAST_ERR=/tmp/tmp.Q429TAjkwB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kPl7Fdxhhl ++ cat /tmp/tmp.Q429TAjkwB ++ rm /tmp/tmp.kPl7Fdxhhl /tmp/tmp.Q429TAjkwB ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local 
command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.sCrDh3TiZm/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-2.sql /tmp/tmp.sCrDh3TiZm/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.sCrDh3TiZm/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-2.sql /tmp/tmp.sCrDh3TiZm/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.sCrDh3TiZm/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-2.sql /tmp/tmp.sCrDh3TiZm/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.c7g8eurn4Q ++ mktemp + local LAST_ERR=/tmp/tmp.2wA6YzmL9Z + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.c7g8eurn4Q perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.2wA6YzmL9Z + rm /tmp/tmp.c7g8eurn4Q /tmp/tmp.2wA6YzmL9Z + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.8WePPKDyk8 ++ mktemp + local LAST_ERR=/tmp/tmp.czKDwx8WAc + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8WePPKDyk8 secret/my-cluster-secrets patched + cat /tmp/tmp.czKDwx8WAc + rm /tmp/tmp.8WePPKDyk8 /tmp/tmp.czKDwx8WAc + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lQ7rMVELlk +++ mktemp ++ local LAST_ERR=/tmp/tmp.hVNLUBu5kA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lQ7rMVELlk ++ cat /tmp/tmp.hVNLUBu5kA ++ rm /tmp/tmp.lQ7rMVELlk /tmp/tmp.hVNLUBu5kA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BsO8dcw8XV +++ mktemp ++ local LAST_ERR=/tmp/tmp.Fl1AswFXkH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BsO8dcw8XV ++ cat /tmp/tmp.Fl1AswFXkH ++ rm /tmp/tmp.BsO8dcw8XV /tmp/tmp.Fl1AswFXkH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
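As with the root and proxyadmin steps, the xtrabackup password change is driven purely through the Secret: the new value is base64-encoded (dGVzdC1wYXNzd29yZA== decodes to test-password) and patched into my-cluster-secrets under the user's key, after which the operator applies the change inside the cluster, which is why the test then waits for pxc/some-name to return to ready. The patch from the trace, written out as a stand-alone sketch:

    # rotate the xtrabackup system password by patching the Secret the operator watches
    new_pass_b64=$(echo -n 'test-password' | base64)
    kubectl patch secret my-cluster-secrets -p "{\"data\":{\"xtrabackup\": \"${new_pass_b64}\"}}"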
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZQv1p69VZV +++ mktemp ++ local LAST_ERR=/tmp/tmp.izm3PkdjAL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZQv1p69VZV ++ cat /tmp/tmp.izm3PkdjAL ++ rm /tmp/tmp.ZQv1p69VZV /tmp/tmp.izm3PkdjAL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xD3Qiu22t3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.55H8JUbc7K ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xD3Qiu22t3 ++ cat /tmp/tmp.55H8JUbc7K ++ rm /tmp/tmp.xD3Qiu22t3 /tmp/tmp.55H8JUbc7K ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NY8kNsRFAZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.WJEqkmVPts ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NY8kNsRFAZ ++ cat /tmp/tmp.WJEqkmVPts ++ rm /tmp/tmp.NY8kNsRFAZ /tmp/tmp.WJEqkmVPts ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6QMEEi2eiN +++ mktemp ++ local LAST_ERR=/tmp/tmp.KpoQ908mbE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6QMEEi2eiN ++ cat /tmp/tmp.KpoQ908mbE ++ rm /tmp/tmp.6QMEEi2eiN /tmp/tmp.KpoQ908mbE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tIiZouUfpF +++ mktemp ++ local LAST_ERR=/tmp/tmp.G1YEJ4FoP5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tIiZouUfpF ++ cat /tmp/tmp.G1YEJ4FoP5 ++ rm /tmp/tmp.tIiZouUfpF /tmp/tmp.G1YEJ4FoP5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6IGhEnWJt3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.0Ki9N9pJgD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6IGhEnWJt3 ++ cat /tmp/tmp.0Ki9N9pJgD ++ rm /tmp/tmp.6IGhEnWJt3 /tmp/tmp.0Ki9N9pJgD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RuMZum29p8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.uPhcgdjhdR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RuMZum29p8 ++ cat /tmp/tmp.uPhcgdjhdR ++ rm /tmp/tmp.RuMZum29p8 /tmp/tmp.uPhcgdjhdR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YpuEy5k72Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.miqsO3Y2bw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YpuEy5k72Y ++ cat /tmp/tmp.miqsO3Y2bw ++ rm /tmp/tmp.YpuEy5k72Y /tmp/tmp.miqsO3Y2bw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cC5inCpUzz +++ mktemp ++ local LAST_ERR=/tmp/tmp.E69n6sRPon ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cC5inCpUzz ++ cat /tmp/tmp.E69n6sRPon ++ rm /tmp/tmp.cC5inCpUzz /tmp/tmp.E69n6sRPon ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.STkEOCqydQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.Chd79loifA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.STkEOCqydQ ++ cat /tmp/tmp.Chd79loifA ++ rm /tmp/tmp.STkEOCqydQ /tmp/tmp.Chd79loifA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.z1n9o0EYkK +++ mktemp ++ local LAST_ERR=/tmp/tmp.bi2vReSi1D ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.z1n9o0EYkK ++ cat /tmp/tmp.bi2vReSi1D ++ rm /tmp/tmp.z1n9o0EYkK /tmp/tmp.bi2vReSi1D ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6GyKxszNcZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.LN30yi8WRz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6GyKxszNcZ ++ cat /tmp/tmp.LN30yi8WRz ++ rm /tmp/tmp.6GyKxszNcZ /tmp/tmp.LN30yi8WRz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4hk0yozF6J +++ mktemp ++ local LAST_ERR=/tmp/tmp.OhQPKmOqTf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4hk0yozF6J ++ cat /tmp/tmp.OhQPKmOqTf ++ rm /tmp/tmp.4hk0yozF6J /tmp/tmp.OhQPKmOqTf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uIRhSWZoCZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.nHBL1vmXzC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uIRhSWZoCZ ++ cat /tmp/tmp.nHBL1vmXzC ++ rm /tmp/tmp.uIRhSWZoCZ /tmp/tmp.nHBL1vmXzC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6wBVhNBTnM +++ mktemp ++ local LAST_ERR=/tmp/tmp.fTj9ETjatm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6wBVhNBTnM ++ cat /tmp/tmp.fTj9ETjatm ++ rm /tmp/tmp.6wBVhNBTnM /tmp/tmp.fTj9ETjatm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8P7niAC6zb +++ mktemp ++ local LAST_ERR=/tmp/tmp.CofXPU2TAQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8P7niAC6zb ++ cat /tmp/tmp.CofXPU2TAQ ++ rm /tmp/tmp.8P7niAC6zb /tmp/tmp.CofXPU2TAQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FdKk0eO0SE +++ mktemp ++ local LAST_ERR=/tmp/tmp.etTHJv7kwp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FdKk0eO0SE ++ cat /tmp/tmp.etTHJv7kwp ++ rm /tmp/tmp.FdKk0eO0SE /tmp/tmp.etTHJv7kwp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.viSApCJIOM +++ mktemp ++ local LAST_ERR=/tmp/tmp.aASFT3XQI1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.viSApCJIOM ++ cat /tmp/tmp.aASFT3XQI1 ++ rm /tmp/tmp.viSApCJIOM /tmp/tmp.aASFT3XQI1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.U5wcviIvGG +++ mktemp ++ local LAST_ERR=/tmp/tmp.CLcsnynung ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.U5wcviIvGG ++ cat /tmp/tmp.CLcsnynung ++ rm /tmp/tmp.U5wcviIvGG /tmp/tmp.CLcsnynung ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LvOQpVKPlX +++ mktemp ++ local LAST_ERR=/tmp/tmp.7uWKdyBudw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LvOQpVKPlX ++ cat /tmp/tmp.7uWKdyBudw ++ rm /tmp/tmp.LvOQpVKPlX /tmp/tmp.7uWKdyBudw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.k8TfYPJcBs +++ mktemp ++ local LAST_ERR=/tmp/tmp.NVZUYOAU8q ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.k8TfYPJcBs ++ cat /tmp/tmp.NVZUYOAU8q ++ rm /tmp/tmp.k8TfYPJcBs /tmp/tmp.NVZUYOAU8q ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 22 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zqeYVM0UVi +++ mktemp ++ local LAST_ERR=/tmp/tmp.RbiHNVwdDN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zqeYVM0UVi ++ cat /tmp/tmp.RbiHNVwdDN ++ rm /tmp/tmp.zqeYVM0UVi /tmp/tmp.RbiHNVwdDN ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tPVFlw2jXA +++ mktemp ++ local LAST_ERR=/tmp/tmp.SpsFCuM6qv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tPVFlw2jXA ++ cat /tmp/tmp.SpsFCuM6qv ++ rm /tmp/tmp.tPVFlw2jXA /tmp/tmp.SpsFCuM6qv ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.HN506inn46 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.IE0Fsm2jpA +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.HN506inn46 +++++ cat /tmp/tmp.IE0Fsm2jpA +++++ rm /tmp/tmp.HN506inn46 /tmp/tmp.IE0Fsm2jpA +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.2y366vqNQs ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.D71BNSLggY +++++ local exit_status=0 ++++++ seq 0 2 
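The wait_cluster_consistency loop traced around here polls the custom resource rather than the pods: .status.state is read every 5 seconds (up to 300 attempts) until it reports ready, then .status.pxc.ready must equal the expected PXC size and the ready count of the enabled proxy (ProxySQL here, chosen by checking .spec.haproxy.enabled and .spec.proxysql.enabled) must equal the expected proxy size. A reduced sketch of the same loop for the cluster above:

    # poll the pxc resource until the operator reports it ready, then read the replica counts
    i=0
    until [ "$(kubectl get pxc some-name -o jsonpath='{.status.state}')" = "ready" ]; do
        [ "$i" -ge 300 ] && { echo "timed out waiting for pxc/some-name"; exit 1; }
        sleep 5
        i=$((i + 1))
    done
    kubectl get pxc some-name -o jsonpath='{.status.pxc.ready} {.status.proxysql.ready}'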
+++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.2y366vqNQs +++++ cat /tmp/tmp.D71BNSLggY +++++ rm /tmp/tmp.2y366vqNQs /tmp/tmp.D71BNSLggY +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XKhXQMJ4Ve +++ mktemp ++ local LAST_ERR=/tmp/tmp.LK6ENVyNHP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XKhXQMJ4Ve ++ cat /tmp/tmp.LK6ENVyNHP ++ rm /tmp/tmp.XKhXQMJ4Ve /tmp/tmp.LK6ENVyNHP ++ return 0 + [[ 2 == \2 ]] + echo + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-3-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' 
-s /tmp/tmp.sCrDh3TiZm/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-3.sql /tmp/tmp.sCrDh3TiZm/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Js7WBHHEvj ++ mktemp + local LAST_ERR=/tmp/tmp.Vag1BlC3kq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Js7WBHHEvj secret/my-cluster-secrets patched + cat /tmp/tmp.Vag1BlC3kq + rm /tmp/tmp.Js7WBHHEvj /tmp/tmp.Vag1BlC3kq + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.8xt1tCBHSy +++ mktemp ++ local LAST_ERR=/tmp/tmp.fbvAZW1GN3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8xt1tCBHSy ++ cat /tmp/tmp.fbvAZW1GN3 ++ rm /tmp/tmp.8xt1tCBHSy /tmp/tmp.fbvAZW1GN3 ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + retry=0 + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep additional_password + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OfHlsk9B3Q +++ mktemp ++ local LAST_ERR=/tmp/tmp.bgNxc0B72u ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OfHlsk9B3Q ++ cat /tmp/tmp.bgNxc0B72u ++ rm /tmp/tmp.OfHlsk9B3Q /tmp/tmp.bgNxc0B72u ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace {"additional_password": "*C1F414D9BAF378B656A849B31F9F8AF3125F558B"} + retry=0 + is_old_password_discarded monitor '-h some-name-pxc 
-uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tcKhlOR03B +++ mktemp ++ local LAST_ERR=/tmp/tmp.wyDGkGHFuQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tcKhlOR03B ++ cat /tmp/tmp.wyDGkGHFuQ ++ rm /tmp/tmp.tcKhlOR03B /tmp/tmp.wyDGkGHFuQ ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 1 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X6RBXGnor8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.l7P86VzRJF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.X6RBXGnor8 ++ cat /tmp/tmp.l7P86VzRJF ++ rm /tmp/tmp.X6RBXGnor8 /tmp/tmp.l7P86VzRJF ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 2 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 
'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EiD40J8aS2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Yh2O8cDCvJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EiD40J8aS2 ++ cat /tmp/tmp.Yh2O8cDCvJ ++ rm /tmp/tmp.EiD40J8aS2 /tmp/tmp.Yh2O8cDCvJ ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 3 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dglnRj78N5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.hYrIRS3zdv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dglnRj78N5 ++ cat /tmp/tmp.hYrIRS3zdv ++ rm /tmp/tmp.dglnRj78N5 /tmp/tmp.hYrIRS3zdv ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 4 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.urU5k8PDom +++ mktemp ++ local LAST_ERR=/tmp/tmp.vmlB26jRUI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods 
--selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.urU5k8PDom ++ cat /tmp/tmp.vmlB26jRUI ++ rm /tmp/tmp.urU5k8PDom /tmp/tmp.vmlB26jRUI ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 5 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X4n0ycZBsC +++ mktemp ++ local LAST_ERR=/tmp/tmp.ihwfXE8gft ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.X4n0ycZBsC ++ cat /tmp/tmp.ihwfXE8gft ++ rm /tmp/tmp.X4n0ycZBsC /tmp/tmp.ihwfXE8gft ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 6 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ak1WUaGm0O +++ mktemp ++ local LAST_ERR=/tmp/tmp.2tWXUD6CXg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ak1WUaGm0O ++ cat /tmp/tmp.2tWXUD6CXg ++ rm /tmp/tmp.ak1WUaGm0O /tmp/tmp.2tWXUD6CXg ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + 
local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 7 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uQMxakCfkR +++ mktemp ++ local LAST_ERR=/tmp/tmp.sm7e2R4yAx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uQMxakCfkR ++ cat /tmp/tmp.sm7e2R4yAx ++ rm /tmp/tmp.uQMxakCfkR /tmp/tmp.sm7e2R4yAx ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 8 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.r5NxeI5jXB +++ mktemp ++ local LAST_ERR=/tmp/tmp.uV3x9v2Jq6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.r5NxeI5jXB ++ cat /tmp/tmp.uV3x9v2Jq6 ++ rm /tmp/tmp.r5NxeI5jXB /tmp/tmp.uV3x9v2Jq6 ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set 
+o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 9 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JCSNhJRNBg +++ mktemp ++ local LAST_ERR=/tmp/tmp.J6whMXH07v ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JCSNhJRNBg ++ cat /tmp/tmp.J6whMXH07v ++ rm /tmp/tmp.JCSNhJRNBg /tmp/tmp.J6whMXH07v ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 10 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lfRcan1t3M +++ mktemp ++ local LAST_ERR=/tmp/tmp.RFaA4XaX0S ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lfRcan1t3M ++ cat /tmp/tmp.RFaA4XaX0S ++ rm /tmp/tmp.lfRcan1t3M /tmp/tmp.RFaA4XaX0S ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 11 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user 
WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JB5baTPXan +++ mktemp ++ local LAST_ERR=/tmp/tmp.OSkJlGD5aF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JB5baTPXan ++ cat /tmp/tmp.OSkJlGD5aF ++ rm /tmp/tmp.JB5baTPXan /tmp/tmp.OSkJlGD5aF ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 12 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6EQHaWxuEz +++ mktemp ++ local LAST_ERR=/tmp/tmp.USvN4KlpCx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6EQHaWxuEz ++ cat /tmp/tmp.USvN4KlpCx ++ rm /tmp/tmp.6EQHaWxuEz /tmp/tmp.USvN4KlpCx ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 13 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp 
++ local LAST_OUT=/tmp/tmp.GS3lmITowb +++ mktemp ++ local LAST_ERR=/tmp/tmp.Bzuyh0Z675 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GS3lmITowb ++ cat /tmp/tmp.Bzuyh0Z675 ++ rm /tmp/tmp.GS3lmITowb /tmp/tmp.Bzuyh0Z675 ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 14 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UDEKgZ0pgl +++ mktemp ++ local LAST_ERR=/tmp/tmp.xiPq6VEYRl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UDEKgZ0pgl ++ cat /tmp/tmp.xiPq6VEYRl ++ rm /tmp/tmp.UDEKgZ0pgl /tmp/tmp.xiPq6VEYRl ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 15 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VkJ73S3EJO +++ mktemp ++ local LAST_ERR=/tmp/tmp.Yz2V8MA4oD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VkJ73S3EJO ++ cat /tmp/tmp.Yz2V8MA4oD ++ 
rm /tmp/tmp.VkJ73S3EJO /tmp/tmp.Yz2V8MA4oD ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace NULL + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.niZfaDaM6P +++ mktemp ++ local LAST_ERR=/tmp/tmp.tkBR5Wm2Br ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.niZfaDaM6P ++ cat /tmp/tmp.tkBR5Wm2Br ++ rm /tmp/tmp.niZfaDaM6P /tmp/tmp.tkBR5Wm2Br ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6DF0HUdgMQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.mRmTHHESJq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6DF0HUdgMQ ++ cat /tmp/tmp.mRmTHHESJq ++ rm /tmp/tmp.6DF0HUdgMQ /tmp/tmp.mRmTHHESJq ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.J7ufnhIOKf ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Kxcicm9Je8 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.J7ufnhIOKf +++++ cat /tmp/tmp.Kxcicm9Je8 +++++ rm /tmp/tmp.J7ufnhIOKf /tmp/tmp.Kxcicm9Je8 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.8kS1XMNZ0b ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.E1mmk9qkPN +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.8kS1XMNZ0b +++++ cat /tmp/tmp.E1mmk9qkPN +++++ rm /tmp/tmp.8kS1XMNZ0b /tmp/tmp.E1mmk9qkPN +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SansAjYIMy +++ mktemp ++ local LAST_ERR=/tmp/tmp.sdRQ3w9ubo ++ local exit_status=0 +++ 
seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SansAjYIMy ++ cat /tmp/tmp.sdRQ3w9ubo ++ rm /tmp/tmp.SansAjYIMy /tmp/tmp.sdRQ3w9ubo ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bAPbTY5cWb +++ mktemp ++ local LAST_ERR=/tmp/tmp.HPEav4vOB6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bAPbTY5cWb ++ cat /tmp/tmp.HPEav4vOB6 ++ rm /tmp/tmp.bAPbTY5cWb /tmp/tmp.HPEav4vOB6 ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.sCrDh3TiZm/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.sCrDh3TiZm/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.zi4Vh5BidU ++ mktemp + local LAST_ERR=/tmp/tmp.nlqTWfTr2v + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zi4Vh5BidU secret/my-cluster-secrets patched + cat /tmp/tmp.nlqTWfTr2v + rm /tmp/tmp.zi4Vh5BidU /tmp/tmp.nlqTWfTr2v + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sd3yvRCmBx +++ mktemp ++ local LAST_ERR=/tmp/tmp.1ukRANXdDb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sd3yvRCmBx ++ cat /tmp/tmp.1ukRANXdDb ++ rm /tmp/tmp.sd3yvRCmBx /tmp/tmp.1ukRANXdDb ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.svTCMVVm5N +++ mktemp ++ local LAST_ERR=/tmp/tmp.MOvdIvRB39 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.svTCMVVm5N ++ cat /tmp/tmp.MOvdIvRB39 ++ rm /tmp/tmp.svTCMVVm5N /tmp/tmp.MOvdIvRB39 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.vomIeOYpNv ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.YuxHN6491p +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.vomIeOYpNv +++++ cat /tmp/tmp.YuxHN6491p +++++ rm /tmp/tmp.vomIeOYpNv /tmp/tmp.YuxHN6491p +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.9wLsbN5dsO ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.xMo0n58QUU +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 
'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.9wLsbN5dsO +++++ cat /tmp/tmp.xMo0n58QUU +++++ rm /tmp/tmp.9wLsbN5dsO /tmp/tmp.xMo0n58QUU +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qTDZgCCdBg +++ mktemp ++ local LAST_ERR=/tmp/tmp.qwiRl8Vdbj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qTDZgCCdBg ++ cat /tmp/tmp.qwiRl8Vdbj ++ rm /tmp/tmp.qTDZgCCdBg /tmp/tmp.qwiRl8Vdbj ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.b0te1jIba1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.EgVvUhepgv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.b0te1jIba1 ++ cat /tmp/tmp.EgVvUhepgv ++ rm /tmp/tmp.b0te1jIba1 /tmp/tmp.EgVvUhepgv ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.sCrDh3TiZm/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.sCrDh3TiZm/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.LY1FJKOs0y ++ mktemp + local LAST_ERR=/tmp/tmp.jlCexPZLww + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LY1FJKOs0y perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.jlCexPZLww + rm /tmp/tmp.LY1FJKOs0y /tmp/tmp.jlCexPZLww + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FoHh3kmHZa +++ mktemp ++ local LAST_ERR=/tmp/tmp.pRChWyZuSE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FoHh3kmHZa ++ cat /tmp/tmp.pRChWyZuSE ++ rm /tmp/tmp.FoHh3kmHZa /tmp/tmp.pRChWyZuSE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZOfQBkQtGp +++ mktemp ++ local LAST_ERR=/tmp/tmp.kFBcB4D46b ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZOfQBkQtGp ++ cat /tmp/tmp.kFBcB4D46b ++ rm /tmp/tmp.ZOfQBkQtGp /tmp/tmp.kFBcB4D46b ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9qieR3xOPK +++ mktemp ++ local LAST_ERR=/tmp/tmp.BRWTZfgrDc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9qieR3xOPK ++ cat /tmp/tmp.BRWTZfgrDc ++ rm /tmp/tmp.9qieR3xOPK /tmp/tmp.BRWTZfgrDc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
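The two mutations driving this part of the test are both plain kubectl patches: the operator system-user password is rotated by writing a new base64 value into the users Secret, and the cluster is then pointed at a different Secret through spec.secretsName. A minimal stand-alone sketch of those two steps, reusing the object names from the trace above (it assumes the current kubectl context and namespace already point at the test cluster):

# rotate the 'operator' password in the users Secret (data values must be base64)
newpass=$(echo -n 'test-password' | base64)
kubectl patch secret my-cluster-secrets -p "{\"data\":{\"operator\":\"${newpass}\"}}"

# switch the cluster to a different Secret; the operator then reconciles,
# which is why the trace sits in an 'initializing' wait loop afterwards
kubectl patch pxc some-name --type=merge --patch '{"spec":{"secretsName":"my-cluster-secrets-2"}}'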
.+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7axSVyXw0M +++ mktemp ++ local LAST_ERR=/tmp/tmp.XWAMogxej4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7axSVyXw0M ++ cat /tmp/tmp.XWAMogxej4 ++ rm /tmp/tmp.7axSVyXw0M /tmp/tmp.XWAMogxej4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tlpAElwIQT +++ mktemp ++ local LAST_ERR=/tmp/tmp.P15TWLJXnf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tlpAElwIQT ++ cat /tmp/tmp.P15TWLJXnf ++ rm /tmp/tmp.tlpAElwIQT /tmp/tmp.P15TWLJXnf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HY4PyHPc4c +++ mktemp ++ local LAST_ERR=/tmp/tmp.EHe4nZti4h ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HY4PyHPc4c ++ cat /tmp/tmp.EHe4nZti4h ++ rm /tmp/tmp.HY4PyHPc4c /tmp/tmp.EHe4nZti4h ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IliS6IXaKz +++ mktemp ++ local LAST_ERR=/tmp/tmp.ERMYZJBEsI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IliS6IXaKz ++ cat /tmp/tmp.ERMYZJBEsI ++ rm /tmp/tmp.IliS6IXaKz /tmp/tmp.ERMYZJBEsI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BnohFpMYEu +++ mktemp ++ local LAST_ERR=/tmp/tmp.rWe5SxrhYi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BnohFpMYEu ++ cat /tmp/tmp.rWe5SxrhYi ++ rm /tmp/tmp.BnohFpMYEu /tmp/tmp.rWe5SxrhYi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.q0xmlE5u9f +++ mktemp ++ local LAST_ERR=/tmp/tmp.bxXnQxL07q ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.q0xmlE5u9f ++ cat /tmp/tmp.bxXnQxL07q ++ rm /tmp/tmp.q0xmlE5u9f /tmp/tmp.bxXnQxL07q ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Pveev1F0OJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.P9dm6gK5n5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Pveev1F0OJ ++ cat /tmp/tmp.P9dm6gK5n5 ++ rm /tmp/tmp.Pveev1F0OJ /tmp/tmp.P9dm6gK5n5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UHoOjU4D9a +++ mktemp ++ local LAST_ERR=/tmp/tmp.10u0WyvZ8D ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UHoOjU4D9a ++ cat /tmp/tmp.10u0WyvZ8D ++ rm /tmp/tmp.UHoOjU4D9a /tmp/tmp.10u0WyvZ8D ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sHaQeLFNaG +++ mktemp ++ local LAST_ERR=/tmp/tmp.PeHJHzhDDD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sHaQeLFNaG ++ cat /tmp/tmp.PeHJHzhDDD ++ rm /tmp/tmp.sHaQeLFNaG /tmp/tmp.PeHJHzhDDD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BfjAQivC5v +++ mktemp ++ local LAST_ERR=/tmp/tmp.yHhgiy7xLA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BfjAQivC5v ++ cat /tmp/tmp.yHhgiy7xLA ++ rm /tmp/tmp.BfjAQivC5v /tmp/tmp.yHhgiy7xLA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QV2T0kk21Z +++ mktemp ++ local LAST_ERR=/tmp/tmp.8Dfo49oHhf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QV2T0kk21Z ++ cat /tmp/tmp.8Dfo49oHhf ++ rm /tmp/tmp.QV2T0kk21Z /tmp/tmp.8Dfo49oHhf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.17b4RCAYdQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.L2xxBItwUW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.17b4RCAYdQ ++ cat /tmp/tmp.L2xxBItwUW ++ rm /tmp/tmp.17b4RCAYdQ /tmp/tmp.L2xxBItwUW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lnM4640ss3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.u4YQP9bCGS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lnM4640ss3 ++ cat /tmp/tmp.u4YQP9bCGS ++ rm /tmp/tmp.lnM4640ss3 /tmp/tmp.u4YQP9bCGS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Z5lcW445VU +++ mktemp ++ local LAST_ERR=/tmp/tmp.tfG2JHH30e ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Z5lcW445VU ++ cat /tmp/tmp.tfG2JHH30e ++ rm /tmp/tmp.Z5lcW445VU /tmp/tmp.tfG2JHH30e ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tPPBQiUGIu +++ mktemp ++ local LAST_ERR=/tmp/tmp.XceDLDNS3x ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tPPBQiUGIu ++ cat /tmp/tmp.XceDLDNS3x ++ rm /tmp/tmp.tPPBQiUGIu /tmp/tmp.XceDLDNS3x ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.frWD2TzaQY +++ mktemp ++ local LAST_ERR=/tmp/tmp.VNIL64UnDT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.frWD2TzaQY ++ cat /tmp/tmp.VNIL64UnDT ++ rm /tmp/tmp.frWD2TzaQY /tmp/tmp.VNIL64UnDT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zU9hz7eRDk +++ mktemp ++ local LAST_ERR=/tmp/tmp.060rUYJf5a ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zU9hz7eRDk ++ cat /tmp/tmp.060rUYJf5a ++ rm /tmp/tmp.zU9hz7eRDk /tmp/tmp.060rUYJf5a ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IKVor3fIvP +++ mktemp ++ local LAST_ERR=/tmp/tmp.2tYLii45ZR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IKVor3fIvP ++ cat /tmp/tmp.2tYLii45ZR ++ rm /tmp/tmp.IKVor3fIvP /tmp/tmp.2tYLii45ZR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
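Every wait_cluster_consistency pass in this log has the same shape: poll the custom resource's .status.state every five seconds, give up after 300 attempts, and only then verify the pxc and proxysql ready counters. A hedged reconstruction of that loop as a stand-alone snippet, built only from the commands visible in the trace (the helper's exact body may differ):

i=0
until [ "$(kubectl get pxc some-name -o jsonpath='{.status.state}')" = "ready" ]; do
    if [ "$i" -ge 300 ]; then
        echo "pxc/some-name never reached ready" >&2
        exit 1
    fi
    echo -n .
    sleep 5
    i=$((i + 1))
done
# once ready, check the expected replica counts (3 PXC nodes, 2 ProxySQL pods here)
[ "$(kubectl get pxc some-name -o jsonpath='{.status.pxc.ready}')" = "3" ]
[ "$(kubectl get pxc some-name -o jsonpath='{.status.proxysql.ready}')" = "2" ]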
.+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bLyVSNXH57 +++ mktemp ++ local LAST_ERR=/tmp/tmp.irMutJ46fp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bLyVSNXH57 ++ cat /tmp/tmp.irMutJ46fp ++ rm /tmp/tmp.bLyVSNXH57 /tmp/tmp.irMutJ46fp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VN6q9EscWD +++ mktemp ++ local LAST_ERR=/tmp/tmp.WLsVHM4j55 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VN6q9EscWD ++ cat /tmp/tmp.WLsVHM4j55 ++ rm /tmp/tmp.VN6q9EscWD /tmp/tmp.WLsVHM4j55 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 22 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.reUVjdFmLG +++ mktemp ++ local LAST_ERR=/tmp/tmp.z90MhjOZgh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.reUVjdFmLG ++ cat /tmp/tmp.z90MhjOZgh ++ rm /tmp/tmp.reUVjdFmLG /tmp/tmp.z90MhjOZgh ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.auZux69ALT +++ mktemp ++ local LAST_ERR=/tmp/tmp.n2iX3ZMSps ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.auZux69ALT ++ cat /tmp/tmp.n2iX3ZMSps ++ rm /tmp/tmp.auZux69ALT /tmp/tmp.n2iX3ZMSps ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.TAMcBbPUw7 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.JhU6w7K4v1 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.TAMcBbPUw7 +++++ cat /tmp/tmp.JhU6w7K4v1 +++++ rm /tmp/tmp.TAMcBbPUw7 /tmp/tmp.JhU6w7K4v1 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.HzSAOUmvG3 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.q9JhsHEEbB +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.HzSAOUmvG3 +++++ cat /tmp/tmp.q9JhsHEEbB +++++ rm /tmp/tmp.HzSAOUmvG3 /tmp/tmp.q9JhsHEEbB +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local 
LAST_OUT=/tmp/tmp.9uBGgRzPVk +++ mktemp ++ local LAST_ERR=/tmp/tmp.lgthX0dLIy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9uBGgRzPVk ++ cat /tmp/tmp.lgthX0dLIy ++ rm /tmp/tmp.9uBGgRzPVk /tmp/tmp.lgthX0dLIy ++ return 0 + [[ 2 == \2 ]] + echo + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.MomgLDb9zv ++ mktemp + local LAST_ERR=/tmp/tmp.bHM5fOhNlz + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.MomgLDb9zv secret/my-cluster-secrets-2 patched + cat /tmp/tmp.bHM5fOhNlz + rm /tmp/tmp.MomgLDb9zv /tmp/tmp.bHM5fOhNlz + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3fdW4OARRK +++ mktemp ++ local LAST_ERR=/tmp/tmp.BBNTgNOvTa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3fdW4OARRK ++ cat /tmp/tmp.BBNTgNOvTa ++ rm /tmp/tmp.3fdW4OARRK /tmp/tmp.BBNTgNOvTa ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dHw9gdvFEN +++ mktemp ++ local LAST_ERR=/tmp/tmp.cLhejm17W6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dHw9gdvFEN ++ cat /tmp/tmp.cLhejm17W6 ++ rm /tmp/tmp.dHw9gdvFEN /tmp/tmp.cLhejm17W6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HdtSXbQihV +++ mktemp ++ local LAST_ERR=/tmp/tmp.xxGQFIp0wX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HdtSXbQihV ++ cat /tmp/tmp.xxGQFIp0wX ++ rm /tmp/tmp.HdtSXbQihV /tmp/tmp.xxGQFIp0wX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J4OzSshAH6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ChfGVC1Cn4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.J4OzSshAH6 ++ cat /tmp/tmp.ChfGVC1Cn4 ++ rm /tmp/tmp.J4OzSshAH6 /tmp/tmp.ChfGVC1Cn4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.p3Ik9PcTT2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.BmvnrKdfaa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.p3Ik9PcTT2 ++ cat /tmp/tmp.BmvnrKdfaa ++ rm /tmp/tmp.p3Ik9PcTT2 /tmp/tmp.BmvnrKdfaa ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tWhwVVPicw +++ mktemp ++ local LAST_ERR=/tmp/tmp.Av10LxOrNd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tWhwVVPicw ++ cat /tmp/tmp.Av10LxOrNd ++ rm /tmp/tmp.tWhwVVPicw /tmp/tmp.Av10LxOrNd ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.lDvGppvW3h ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.e9fnmbJJ1J +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.lDvGppvW3h +++++ cat /tmp/tmp.e9fnmbJJ1J +++++ rm /tmp/tmp.lDvGppvW3h /tmp/tmp.e9fnmbJJ1J +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.e295BG3ntp ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.IRPK4727o9 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.e295BG3ntp +++++ cat /tmp/tmp.IRPK4727o9 +++++ rm /tmp/tmp.e295BG3ntp /tmp/tmp.IRPK4727o9 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7ZvMUZxp6a +++ mktemp ++ local LAST_ERR=/tmp/tmp.WHAhnXQsm8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7ZvMUZxp6a ++ cat /tmp/tmp.WHAhnXQsm8 ++ rm /tmp/tmp.7ZvMUZxp6a /tmp/tmp.WHAhnXQsm8 ++ return 0 + [[ 2 == \2 ]] + echo + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 
'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5WzIzQH9h0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.8u0P2jnler ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5WzIzQH9h0 ++ cat /tmp/tmp.8u0P2jnler ++ rm /tmp/tmp.5WzIzQH9h0 /tmp/tmp.8u0P2jnler ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.sCrDh3TiZm/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.sCrDh3TiZm/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.7UzVaNCEwe +++ mktemp ++ local LAST_ERR=/tmp/tmp.vPn1d7NCsH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7UzVaNCEwe ++ cat /tmp/tmp.vPn1d7NCsH ++ rm /tmp/tmp.7UzVaNCEwe /tmp/tmp.vPn1d7NCsH ++ return 0 + newpass='QqpA{r-V9i5.HqwZCh8' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''QqpA{r-V9i5.HqwZCh8'\'';' '-h some-name-pxc -uroot -p'\''QqpA{r-V9i5.HqwZCh8'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''QqpA{r-V9i5.HqwZCh8'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''QqpA{r-V9i5.HqwZCh8'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aWtMRpf7SZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.FzEWR3Dg3H ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aWtMRpf7SZ ++ cat /tmp/tmp.FzEWR3Dg3H ++ rm /tmp/tmp.aWtMRpf7SZ /tmp/tmp.FzEWR3Dg3H ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''QqpA{r-V9i5.HqwZCh8'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''QqpA{r-V9i5.HqwZCh8'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''QqpA{r-V9i5.HqwZCh8'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''QqpA{r-V9i5.HqwZCh8'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0Z0TUjm9ig +++ mktemp ++ local LAST_ERR=/tmp/tmp.LJWVjFExLx ++ local exit_status=0 +++ 
seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0Z0TUjm9ig ++ cat /tmp/tmp.LJWVjFExLx ++ rm /tmp/tmp.0Z0TUjm9ig /tmp/tmp.LJWVjFExLx ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.sCrDh3TiZm/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.sCrDh3TiZm/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.1w7UCzMx8S +++ mktemp ++ local LAST_ERR=/tmp/tmp.yNGgjWKdzK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1w7UCzMx8S ++ cat /tmp/tmp.yNGgjWKdzK ++ rm /tmp/tmp.1w7UCzMx8S /tmp/tmp.yNGgjWKdzK ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.SfqQTMMT4A ++ mktemp + local LAST_ERR=/tmp/tmp.zlMfXFwHls + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SfqQTMMT4A secret/my-cluster-secrets-2 configured + cat /tmp/tmp.zlMfXFwHls Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
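The getSecretData calls traced above are a templated kubectl get piped through base64: that is how the test recovers the generated root password and the current operator password from the internal Secret. Reproduced by hand with the same object names (assuming access to the test namespace):

kubectl get secret my-cluster-secrets-2 --template='{{.data.root}}' | base64 --decode
kubectl get secret internal-some-name --template='{{.data.operator}}' | base64 --decode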
+ rm /tmp/tmp.SfqQTMMT4A /tmp/tmp.zlMfXFwHls + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KnGNfIaBCs +++ mktemp ++ local LAST_ERR=/tmp/tmp.GxJYtdOid4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KnGNfIaBCs ++ cat /tmp/tmp.GxJYtdOid4 ++ rm /tmp/tmp.KnGNfIaBCs /tmp/tmp.GxJYtdOid4 ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.sCrDh3TiZm/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.sCrDh3TiZm/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/conf/some-name.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.TaAj1VP32b + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2213-6c08ea71#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-29404~ + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' ++ mktemp + local LAST_ERR=/tmp/tmp.1JhxjlzLwr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TaAj1VP32b perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.1JhxjlzLwr + rm /tmp/tmp.TaAj1VP32b /tmp/tmp.1JhxjlzLwr + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LTiepMiPpx +++ mktemp ++ local LAST_ERR=/tmp/tmp.kaurPhD0f2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LTiepMiPpx ++ cat /tmp/tmp.kaurPhD0f2 ++ rm /tmp/tmp.LTiepMiPpx /tmp/tmp.kaurPhD0f2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
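apply_config, expanded in the trace above, streams the CR manifest through a chain of sed expressions that pin every image to the build under test before piping the result into kubectl apply -f -. A trimmed sketch of that pipeline showing a few of the substitutions visible in the log (the full helper applies several more image rewrites, e.g. for haproxy, pmm and logcollector):

cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/conf/some-name.yml \
    | sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
    | sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' \
    | sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2213-6c08ea71#' \
    | sed -e 's#apply:.*#apply: Never#' \
    | kubectl apply -f -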
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ck96NBAo9G +++ mktemp ++ local LAST_ERR=/tmp/tmp.JTiWjtbxMk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ck96NBAo9G ++ cat /tmp/tmp.JTiWjtbxMk ++ rm /tmp/tmp.Ck96NBAo9G /tmp/tmp.JTiWjtbxMk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GROMVJvNL4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.SPlwSs6eg0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GROMVJvNL4 ++ cat /tmp/tmp.SPlwSs6eg0 ++ rm /tmp/tmp.GROMVJvNL4 /tmp/tmp.SPlwSs6eg0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ApOwGbs7Rm +++ mktemp ++ local LAST_ERR=/tmp/tmp.jYMBYMAZuS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ApOwGbs7Rm ++ cat /tmp/tmp.jYMBYMAZuS ++ rm /tmp/tmp.ApOwGbs7Rm /tmp/tmp.jYMBYMAZuS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rCR6DzGFUt +++ mktemp ++ local LAST_ERR=/tmp/tmp.tpNkxYAamW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rCR6DzGFUt ++ cat /tmp/tmp.tpNkxYAamW ++ rm /tmp/tmp.rCR6DzGFUt /tmp/tmp.tpNkxYAamW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bhnQmBx8hy +++ mktemp ++ local LAST_ERR=/tmp/tmp.S5L98dXfk0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bhnQmBx8hy ++ cat /tmp/tmp.S5L98dXfk0 ++ rm /tmp/tmp.bhnQmBx8hy /tmp/tmp.S5L98dXfk0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TArtv0ep6M +++ mktemp ++ local LAST_ERR=/tmp/tmp.YAygxnTeh9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TArtv0ep6M ++ cat /tmp/tmp.YAygxnTeh9 ++ rm /tmp/tmp.TArtv0ep6M /tmp/tmp.YAygxnTeh9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OqHhDAhA2C +++ mktemp ++ local LAST_ERR=/tmp/tmp.izRRjUSDfp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OqHhDAhA2C ++ cat /tmp/tmp.izRRjUSDfp ++ rm /tmp/tmp.OqHhDAhA2C /tmp/tmp.izRRjUSDfp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VOJdwc1vAw +++ mktemp ++ local LAST_ERR=/tmp/tmp.lCuPyuDPap ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VOJdwc1vAw ++ cat /tmp/tmp.lCuPyuDPap ++ rm /tmp/tmp.VOJdwc1vAw /tmp/tmp.lCuPyuDPap ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FT4wW2DI3c +++ mktemp ++ local LAST_ERR=/tmp/tmp.RWXo83002U ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FT4wW2DI3c ++ cat /tmp/tmp.RWXo83002U ++ rm /tmp/tmp.FT4wW2DI3c /tmp/tmp.RWXo83002U ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gGjZ9YSwl7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.sHyRRdtDev ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gGjZ9YSwl7 ++ cat /tmp/tmp.sHyRRdtDev ++ rm /tmp/tmp.gGjZ9YSwl7 /tmp/tmp.sHyRRdtDev ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8l5SKoxNQM +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ub4kp8PGSo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8l5SKoxNQM ++ cat /tmp/tmp.Ub4kp8PGSo ++ rm /tmp/tmp.8l5SKoxNQM /tmp/tmp.Ub4kp8PGSo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Dhz8aymUEw +++ mktemp ++ local LAST_ERR=/tmp/tmp.Z5STpHI7FE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Dhz8aymUEw ++ cat /tmp/tmp.Z5STpHI7FE ++ rm /tmp/tmp.Dhz8aymUEw /tmp/tmp.Z5STpHI7FE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LiOUwaDJiB +++ mktemp ++ local LAST_ERR=/tmp/tmp.mEy1Z1GtrF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LiOUwaDJiB ++ cat /tmp/tmp.mEy1Z1GtrF ++ rm /tmp/tmp.LiOUwaDJiB /tmp/tmp.mEy1Z1GtrF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kp2Y7HD6Y3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.83L3W94Je6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kp2Y7HD6Y3 ++ cat /tmp/tmp.83L3W94Je6 ++ rm /tmp/tmp.kp2Y7HD6Y3 /tmp/tmp.83L3W94Je6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.U2JlKujXRW +++ mktemp ++ local LAST_ERR=/tmp/tmp.7RXHvcPg9w ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.U2JlKujXRW ++ cat /tmp/tmp.7RXHvcPg9w ++ rm /tmp/tmp.U2JlKujXRW /tmp/tmp.7RXHvcPg9w ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.T9tpdWTrkB +++ mktemp ++ local LAST_ERR=/tmp/tmp.ycDd40HDjo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.T9tpdWTrkB ++ cat /tmp/tmp.ycDd40HDjo ++ rm /tmp/tmp.T9tpdWTrkB /tmp/tmp.ycDd40HDjo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UIDQY2HoiC +++ mktemp ++ local LAST_ERR=/tmp/tmp.7fz9e12Ml0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UIDQY2HoiC ++ cat /tmp/tmp.7fz9e12Ml0 ++ rm /tmp/tmp.UIDQY2HoiC /tmp/tmp.7fz9e12Ml0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.u6QKfrF4Gk +++ mktemp ++ local LAST_ERR=/tmp/tmp.HRW37bRaqW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.u6QKfrF4Gk ++ cat /tmp/tmp.HRW37bRaqW ++ rm /tmp/tmp.u6QKfrF4Gk /tmp/tmp.HRW37bRaqW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RFW26GIZis +++ mktemp ++ local LAST_ERR=/tmp/tmp.tSyXcucmnB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RFW26GIZis ++ cat /tmp/tmp.tSyXcucmnB ++ rm /tmp/tmp.RFW26GIZis /tmp/tmp.tSyXcucmnB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tzk3roZYuY +++ mktemp ++ local LAST_ERR=/tmp/tmp.7AvCqYdXbz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tzk3roZYuY ++ cat /tmp/tmp.7AvCqYdXbz ++ rm /tmp/tmp.tzk3roZYuY /tmp/tmp.7AvCqYdXbz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Fc34ZDG8Pp +++ mktemp ++ local LAST_ERR=/tmp/tmp.v7TYsgYsPH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Fc34ZDG8Pp ++ cat /tmp/tmp.v7TYsgYsPH ++ rm /tmp/tmp.Fc34ZDG8Pp /tmp/tmp.v7TYsgYsPH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9r2kGGjaDi +++ mktemp ++ local LAST_ERR=/tmp/tmp.kUUr53jPOF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9r2kGGjaDi ++ cat /tmp/tmp.kUUr53jPOF ++ rm /tmp/tmp.9r2kGGjaDi /tmp/tmp.kUUr53jPOF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 22 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MvpTd2TtRF +++ mktemp ++ local LAST_ERR=/tmp/tmp.y50fDTrHqr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MvpTd2TtRF ++ cat /tmp/tmp.y50fDTrHqr ++ rm /tmp/tmp.MvpTd2TtRF /tmp/tmp.y50fDTrHqr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 23 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.P2ivNBH7nP +++ mktemp ++ local LAST_ERR=/tmp/tmp.yNHqBpXEtW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.P2ivNBH7nP ++ cat /tmp/tmp.yNHqBpXEtW ++ rm /tmp/tmp.P2ivNBH7nP /tmp/tmp.yNHqBpXEtW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 24 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.up3beg5orz +++ mktemp ++ local LAST_ERR=/tmp/tmp.hEtvgcofgd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.up3beg5orz ++ cat /tmp/tmp.hEtvgcofgd ++ rm /tmp/tmp.up3beg5orz /tmp/tmp.hEtvgcofgd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 25 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.d7tiYjvAIz +++ mktemp ++ local LAST_ERR=/tmp/tmp.nUW96D1Zcu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.d7tiYjvAIz ++ cat /tmp/tmp.nUW96D1Zcu ++ rm /tmp/tmp.d7tiYjvAIz /tmp/tmp.nUW96D1Zcu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 26 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9ISZN68rEB +++ mktemp ++ local LAST_ERR=/tmp/tmp.XUpRVyd7k9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9ISZN68rEB ++ cat /tmp/tmp.XUpRVyd7k9 ++ rm /tmp/tmp.9ISZN68rEB /tmp/tmp.XUpRVyd7k9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 27 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zP5ne3QqhE +++ mktemp ++ local LAST_ERR=/tmp/tmp.FRuLPwCjE2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zP5ne3QqhE ++ cat /tmp/tmp.FRuLPwCjE2 ++ rm /tmp/tmp.zP5ne3QqhE /tmp/tmp.FRuLPwCjE2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 28 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uh8l6lKduw +++ mktemp ++ local LAST_ERR=/tmp/tmp.9JJ8jQ8n5h ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uh8l6lKduw ++ cat /tmp/tmp.9JJ8jQ8n5h ++ rm /tmp/tmp.uh8l6lKduw /tmp/tmp.9JJ8jQ8n5h ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 29 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.a7GApreBAi +++ mktemp ++ local LAST_ERR=/tmp/tmp.8gPLHvSEIT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.a7GApreBAi ++ cat /tmp/tmp.8gPLHvSEIT ++ rm /tmp/tmp.a7GApreBAi /tmp/tmp.8gPLHvSEIT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 30 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kVnaN8KXg1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.RSSz6iem4E ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kVnaN8KXg1 ++ cat /tmp/tmp.RSSz6iem4E ++ rm /tmp/tmp.kVnaN8KXg1 /tmp/tmp.RSSz6iem4E ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 31 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IC7DZomyjV +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZNe1Xe7Vtt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IC7DZomyjV ++ cat /tmp/tmp.ZNe1Xe7Vtt ++ rm /tmp/tmp.IC7DZomyjV /tmp/tmp.ZNe1Xe7Vtt ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 32 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XM0vNVfC2I +++ mktemp ++ local LAST_ERR=/tmp/tmp.TugpRaHDr1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XM0vNVfC2I ++ cat /tmp/tmp.TugpRaHDr1 ++ rm /tmp/tmp.XM0vNVfC2I /tmp/tmp.TugpRaHDr1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 33 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MxNH6KckEn +++ mktemp ++ local LAST_ERR=/tmp/tmp.49NnQaAtz1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MxNH6KckEn ++ cat /tmp/tmp.49NnQaAtz1 ++ rm /tmp/tmp.MxNH6KckEn /tmp/tmp.49NnQaAtz1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 34 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CrILnJh5EY +++ mktemp ++ local LAST_ERR=/tmp/tmp.r56JVPf6uC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CrILnJh5EY ++ cat /tmp/tmp.r56JVPf6uC ++ rm /tmp/tmp.CrILnJh5EY /tmp/tmp.r56JVPf6uC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 35 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KTceeh4rjh +++ mktemp ++ local LAST_ERR=/tmp/tmp.urw7zq5ndf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KTceeh4rjh ++ cat /tmp/tmp.urw7zq5ndf ++ rm /tmp/tmp.KTceeh4rjh /tmp/tmp.urw7zq5ndf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 36 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6OfyiOhDuN +++ mktemp ++ local LAST_ERR=/tmp/tmp.XWxXtGrtJ0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6OfyiOhDuN ++ cat /tmp/tmp.XWxXtGrtJ0 ++ rm /tmp/tmp.6OfyiOhDuN /tmp/tmp.XWxXtGrtJ0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 37 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3tqwQfHUcx +++ mktemp ++ local LAST_ERR=/tmp/tmp.KshFQ6R33J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3tqwQfHUcx ++ cat /tmp/tmp.KshFQ6R33J ++ rm /tmp/tmp.3tqwQfHUcx /tmp/tmp.KshFQ6R33J ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 38 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Zmo5t7WsHH +++ mktemp ++ local LAST_ERR=/tmp/tmp.6o1LmpL8fb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Zmo5t7WsHH ++ cat /tmp/tmp.6o1LmpL8fb ++ rm /tmp/tmp.Zmo5t7WsHH /tmp/tmp.6o1LmpL8fb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 39 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aXkOcCYDAW +++ mktemp ++ local LAST_ERR=/tmp/tmp.5isVtL6Fze ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aXkOcCYDAW ++ cat /tmp/tmp.5isVtL6Fze ++ rm /tmp/tmp.aXkOcCYDAW /tmp/tmp.5isVtL6Fze ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 40 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BDGI0LpIBW +++ mktemp ++ local LAST_ERR=/tmp/tmp.nWOCpHWQ0Z ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BDGI0LpIBW ++ cat /tmp/tmp.nWOCpHWQ0Z ++ rm /tmp/tmp.BDGI0LpIBW /tmp/tmp.nWOCpHWQ0Z ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 41 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Pjdsz3koRE +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZjOXzwWvfd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Pjdsz3koRE ++ cat /tmp/tmp.ZjOXzwWvfd ++ rm /tmp/tmp.Pjdsz3koRE /tmp/tmp.ZjOXzwWvfd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 42 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DSbJLlVtRZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.yNy7IdfoYB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DSbJLlVtRZ ++ cat /tmp/tmp.yNy7IdfoYB ++ rm /tmp/tmp.DSbJLlVtRZ /tmp/tmp.yNy7IdfoYB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 43 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZYkhlJf0Ih +++ mktemp ++ local LAST_ERR=/tmp/tmp.V3XyLBHIgY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZYkhlJf0Ih ++ cat /tmp/tmp.V3XyLBHIgY ++ rm /tmp/tmp.ZYkhlJf0Ih /tmp/tmp.V3XyLBHIgY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 44 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kH65lgVczb +++ mktemp ++ local LAST_ERR=/tmp/tmp.VZ1msUmzNi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kH65lgVczb ++ cat /tmp/tmp.VZ1msUmzNi ++ rm /tmp/tmp.kH65lgVczb /tmp/tmp.VZ1msUmzNi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 45 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.f8ptmbLiqx +++ mktemp ++ local LAST_ERR=/tmp/tmp.9c6YBVbli0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.f8ptmbLiqx ++ cat /tmp/tmp.9c6YBVbli0 ++ rm /tmp/tmp.f8ptmbLiqx /tmp/tmp.9c6YBVbli0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 46 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ti1s3ddUwT +++ mktemp ++ local LAST_ERR=/tmp/tmp.fSyfKbBoih ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ti1s3ddUwT ++ cat /tmp/tmp.fSyfKbBoih ++ rm /tmp/tmp.Ti1s3ddUwT /tmp/tmp.fSyfKbBoih ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 47 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QBZVoG77zV +++ mktemp ++ local LAST_ERR=/tmp/tmp.B9UP6Cq1pv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QBZVoG77zV ++ cat /tmp/tmp.B9UP6Cq1pv ++ rm /tmp/tmp.QBZVoG77zV /tmp/tmp.B9UP6Cq1pv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 48 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oLZj94HnDd +++ mktemp ++ local LAST_ERR=/tmp/tmp.7H2DM1BkNS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oLZj94HnDd ++ cat /tmp/tmp.7H2DM1BkNS ++ rm /tmp/tmp.oLZj94HnDd /tmp/tmp.7H2DM1BkNS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 49 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.snMn8pZ8WN +++ mktemp ++ local LAST_ERR=/tmp/tmp.zEDqSpQ1TS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.snMn8pZ8WN ++ cat /tmp/tmp.zEDqSpQ1TS ++ rm /tmp/tmp.snMn8pZ8WN /tmp/tmp.zEDqSpQ1TS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 50 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Y19ffzvggK +++ mktemp ++ local LAST_ERR=/tmp/tmp.7CL4d5tp3D ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Y19ffzvggK ++ cat /tmp/tmp.7CL4d5tp3D ++ rm /tmp/tmp.Y19ffzvggK /tmp/tmp.7CL4d5tp3D ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 51 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0jsJ2UNb3Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.ppQKdzLSwH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0jsJ2UNb3Y ++ cat /tmp/tmp.ppQKdzLSwH ++ rm /tmp/tmp.0jsJ2UNb3Y /tmp/tmp.ppQKdzLSwH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 52 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tK8zbqz4r3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Foi9E4n4zk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tK8zbqz4r3 ++ cat /tmp/tmp.Foi9E4n4zk ++ rm /tmp/tmp.tK8zbqz4r3 /tmp/tmp.Foi9E4n4zk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 53 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iuM1hmXhzi +++ mktemp ++ local LAST_ERR=/tmp/tmp.WCIHWaL0Ql ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iuM1hmXhzi ++ cat /tmp/tmp.WCIHWaL0Ql ++ rm /tmp/tmp.iuM1hmXhzi /tmp/tmp.WCIHWaL0Ql ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 54 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xH2VJ0zSGC +++ mktemp ++ local LAST_ERR=/tmp/tmp.6TlHXkzwVC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xH2VJ0zSGC ++ cat /tmp/tmp.6TlHXkzwVC ++ rm /tmp/tmp.xH2VJ0zSGC /tmp/tmp.6TlHXkzwVC ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gpTyhvTrEe +++ mktemp ++ local LAST_ERR=/tmp/tmp.mhYQXmijWR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gpTyhvTrEe ++ cat /tmp/tmp.mhYQXmijWR ++ rm /tmp/tmp.gpTyhvTrEe /tmp/tmp.mhYQXmijWR ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.8tO5LQEzws ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Gd7Xt1ay2b +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.8tO5LQEzws +++++ cat /tmp/tmp.Gd7Xt1ay2b +++++ rm /tmp/tmp.8tO5LQEzws /tmp/tmp.Gd7Xt1ay2b +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eG3a8aldR1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.NY4js1xx2J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eG3a8aldR1 ++ cat /tmp/tmp.NY4js1xx2J ++ rm /tmp/tmp.eG3a8aldR1 /tmp/tmp.NY4js1xx2J ++ return 0 + [[ 3 == \3 ]] + echo + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TyZbgOTfmx +++ mktemp ++ local LAST_ERR=/tmp/tmp.YfLmbUJ6lz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TyZbgOTfmx ++ cat /tmp/tmp.YfLmbUJ6lz ++ rm /tmp/tmp.TyZbgOTfmx /tmp/tmp.YfLmbUJ6lz ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.XbOkwhdN5o ++ mktemp + local LAST_ERR=/tmp/tmp.kyIGDaqq3q + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 
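[Note] At this point wait_cluster_consistency has seen .status.state flip to ready and confirmed that .status.pxc.ready and .status.haproxy.ready both equal 3; check_generation then asserts the haproxy StatefulSet is still at .metadata.generation 1 (no unwanted rollout), and the monitor password is rotated by patching my-cluster-secrets. A condensed sketch of that poll-then-patch flow, assuming the current kubectl context already targets the test namespace (helper names here are illustrative, not the suite's):

# Poll the PXC CR until it is ready and both component ready-counts match the request.
wait_ready() {
    local cluster="$1" size="$2" proxy_size="$3" i=0 max=300
    until [[ "$(kubectl get pxc "${cluster}" -o jsonpath='{.status.state}')" == "ready" &&
             "$(kubectl get pxc "${cluster}" -o jsonpath='{.status.pxc.ready}')" == "${size}" &&
             "$(kubectl get pxc "${cluster}" -o jsonpath='{.status.haproxy.ready}')" == "${proxy_size}" ]]; do
        (( i++ >= max )) && { echo "timeout waiting for pxc/${cluster}" >&2; return 1; }
        sleep 5
    done
}

wait_ready some-name 3 3
# Assert no new StatefulSet rollout was generated by the previous change.
[[ "$(kubectl get statefulset some-name-haproxy -o jsonpath='{.metadata.generation}')" == "1" ]]
# Rotate the monitor password; the value must be base64-encoded, as in patch_secret.
kubectl patch secret my-cluster-secrets -p='{"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}'
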
+ set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XbOkwhdN5o secret/my-cluster-secrets patched + cat /tmp/tmp.kyIGDaqq3q + rm /tmp/tmp.XbOkwhdN5o /tmp/tmp.kyIGDaqq3q + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GP4SpA7Zcj +++ mktemp ++ local LAST_ERR=/tmp/tmp.DCUxGmelJI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GP4SpA7Zcj ++ cat /tmp/tmp.DCUxGmelJI ++ rm /tmp/tmp.GP4SpA7Zcj /tmp/tmp.DCUxGmelJI ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HVFo4X2c6F +++ mktemp ++ local LAST_ERR=/tmp/tmp.UAAW78kO1z ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HVFo4X2c6F ++ cat /tmp/tmp.UAAW78kO1z ++ rm /tmp/tmp.HVFo4X2c6F /tmp/tmp.UAAW78kO1z ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.N8zlFfOtRN ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.nZ4rPPJSew +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.N8zlFfOtRN +++++ cat /tmp/tmp.nZ4rPPJSew +++++ rm /tmp/tmp.N8zlFfOtRN /tmp/tmp.nZ4rPPJSew +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7vn49Ai9rQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.qpO28d6pU2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7vn49Ai9rQ ++ cat /tmp/tmp.qpO28d6pU2 ++ rm /tmp/tmp.7vn49Ai9rQ /tmp/tmp.qpO28d6pU2 ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-3-80.sql ]] + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iciQhAkAFM +++ mktemp ++ local LAST_ERR=/tmp/tmp.EXjkO6A5RX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iciQhAkAFM ++ cat /tmp/tmp.EXjkO6A5RX ++ rm /tmp/tmp.iciQhAkAFM /tmp/tmp.EXjkO6A5RX ++ return 0 + client_pod=pxc-client-59944c5bbf-mvthb + wait_pod pxc-client-59944c5bbf-mvthb + local pod=pxc-client-59944c5bbf-mvthb + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mvthb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mvthb condition met waiting for pod/pxc-client-59944c5bbf-mvthb to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.sCrDh3TiZm/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2213/e2e-tests/users/compare/select-3.sql /tmp/tmp.sCrDh3TiZm/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FjZWoV4HBV +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZnCLrVn9gu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FjZWoV4HBV ++ cat /tmp/tmp.ZnCLrVn9gu ++ rm /tmp/tmp.FjZWoV4HBV /tmp/tmp.ZnCLrVn9gu ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + destroy users-29404 + local namespace=users-29404 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + sort -u + grep -v 'get backup status: Job.batch' + tee /tmp/tmp.sCrDh3TiZm/operator.log +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.j7m37bBTLI +++ mktemp ++ local LAST_ERR=/tmp/tmp.zhTSsAxq6s ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ 
'[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.j7m37bBTLI ++ cat /tmp/tmp.zhTSsAxq6s ++ rm /tmp/tmp.j7m37bBTLI /tmp/tmp.zhTSsAxq6s ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-f5b849cf6-rz4rd ++ mktemp + local LAST_OUT=/tmp/tmp.FrxOG7mZrj ++ mktemp + local LAST_ERR=/tmp/tmp.2K5HLYuStw + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-f5b849cf6-rz4rd + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.FrxOG7mZrj + cat /tmp/tmp.2K5HLYuStw + rm /tmp/tmp.FrxOG7mZrj /tmp/tmp.2K5HLYuStw + return 0 } }, }, { }, }, { }, }, ""), }, { }, }, }, - }, - { - }, - { - }, - }, + }, ... // 16 identical fields ... // 16 identical fields "2", 2025-10-15T11:46:03.564Z INFO setup Manager starting up {"gitCommit": "6c08ea71a9c64b418e03f427c80eeb7b401eee22", "gitBranch": "PR-2213-6c08ea71", "buildTime": "2025-10-15T09:27:18Z", "goVersion": "go1.24.9", "os": "linux", "arch": "amd64"} 2025-10-15T11:46:03.564Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.13-gke.1023000"} 2025-10-15T11:46:03.567Z INFO setup Registering Components. 2025-10-15T11:46:04.745Z INFO controller-runtime.metrics Starting metrics server 2025-10-15T11:46:04.745Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-10-15T11:46:04.745Z INFO setup Starting the Cmd. 2025-10-15T11:46:04.746Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-10-15T11:46:04.746Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-10-15T11:46:04.746Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-10-15T11:46:04.746Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-10-15T11:46:04.746Z INFO controller-runtime.webhook Starting webhook server 2025-10-15T11:46:04.746Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-10-15T11:46:04.846Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 
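[Note] The destroy step above tees the operator log through a de-noising chain before dumping it here (grep away level=info, "the object has been modified" conflict noise and backup-status polling, strip the ts field with sed, sort -u). A roughly equivalent standalone pipeline, assuming the same label selector and namespace; the filter list is a simplification of the suite's exact set:

# Resolve the operator pod and capture a de-noised copy of its log (sketch).
ns=pxc-operator
pod="$(kubectl get pods -n "${ns}" \
        --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator \
        -o jsonpath='{.items[].metadata.name}')"
kubectl logs -n "${ns}" "${pod}" \
    | grep -v level=info \
    | grep -v 'the object has been modified' \
    | grep -v 'get backup status: Job.batch' \
    | sed -r 's/"ts":[0-9.]+//' \
    | sort -u \
    | tee /tmp/operator.log
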
2025-10-15T11:46:04.894Z DEBUG events percona-xtradb-cluster-operator-f5b849cf6-rz4rd_d611285a-686e-4761-ae2e-b4408e16c57d became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"e03a796e-786c-4adc-86ba-aa294f5aff75","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1760528764888383009"}, "reason": "LeaderElection"} 2025-10-15T11:46:04.894Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-10-15T11:46:04.894Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.Secret"} 2025-10-15T11:46:04.894Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-10-15T11:46:04.894Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2025-10-15T11:46:04.895Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-10-15T11:46:04.995Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2025-10-15T11:46:04.995Z INFO Starting Controller {"controller": "pxc-controller"} 2025-10-15T11:46:04.995Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2025-10-15T11:46:04.995Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2025-10-15T11:46:04.995Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2025-10-15T11:46:04.995Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2025-10-15T11:46:39.509Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "0dd3bd53-c778-48fe-98ea-a7a517c5b7d9", "version": "1.19.0"} 2025-10-15T11:46:39.744Z INFO User secrets updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "0dd3bd53-c778-48fe-98ea-a7a517c5b7d9", "secrets": "my-cluster-secrets"} 2025-10-15T11:46:39.964Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "0dd3bd53-c778-48fe-98ea-a7a517c5b7d9", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-10-15T11:46:39.982Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "0dd3bd53-c778-48fe-98ea-a7a517c5b7d9", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-10-15T11:46:40.550Z ERROR Reconciler error {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "0dd3bd53-c778-48fe-98ea-a7a517c5b7d9", "error": "failed to deploy proxysql: updatePod for proxysql: reconcile config: reconcile autotune config: create or update configmap: configmaps \"auto-some-name-pxc\" already exists", "errorVerbose": "configmaps \"auto-some-name-pxc\" already exists\ncreate or update 
configmap\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileAutotuneConfigMap\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:92\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:25\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nreconcile autotune 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:27\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nreconcile 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:54\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nupdatePod for proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:578\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nfailed to deploy 
proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:596\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-15T11:46:40.664Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "3e4d9249-efaa-4349-9e1b-88fee981a620", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-15T11:46:40.701Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "3e4d9249-efaa-4349-9e1b-88fee981a620", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-15T11:46:40.757Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "3e4d9249-efaa-4349-9e1b-88fee981a620", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-15T11:46:40.810Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "3e4d9249-efaa-4349-9e1b-88fee981a620", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-15T11:46:40.850Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "3e4d9249-efaa-4349-9e1b-88fee981a620", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-15T11:46:41.007Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "3e4d9249-efaa-4349-9e1b-88fee981a620", "object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-15T11:46:41.926Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "f807f8ee-aeac-43da-9fbb-08734322df9a", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-10-15T11:46:41.970Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "f807f8ee-aeac-43da-9fbb-08734322df9a", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-10-15T11:47:58.298Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-29404", "name": 
"some-name", "reconcileID": "a50d567c-7cbc-4ab2-9be1-517e65f149c1", "user": "operator"} 2025-10-15T11:47:58.332Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a50d567c-7cbc-4ab2-9be1-517e65f149c1", "user": "monitor"} 2025-10-15T11:47:58.380Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a50d567c-7cbc-4ab2-9be1-517e65f149c1"} 2025-10-15T11:47:58.412Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a50d567c-7cbc-4ab2-9be1-517e65f149c1"} 2025-10-15T11:47:58.455Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a50d567c-7cbc-4ab2-9be1-517e65f149c1", "user": "xtrabackup"} 2025-10-15T11:47:58.492Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a50d567c-7cbc-4ab2-9be1-517e65f149c1"} 2025-10-15T11:47:58.519Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a50d567c-7cbc-4ab2-9be1-517e65f149c1", "user": "replication"} 2025-10-15T11:47:58.528Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a50d567c-7cbc-4ab2-9be1-517e65f149c1", "err": "get primary pxc pod: not found"} 2025-10-15T11:48:03.250Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "3a5e45c5-437c-4584-a626-40c399ee24e1", "err": "get primary pxc pod: not found"} 2025-10-15T11:48:08.445Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b1976aac-2fb7-44fc-9054-4d0a5580c1ef", "err": "get primary pxc pod: not found"} 2025-10-15T11:50:14.660Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9771c132-7100-4ee4-a5da-7d3484fd2b2d", "user": "root"} 2025-10-15T11:50:14.799Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9771c132-7100-4ee4-a5da-7d3484fd2b2d", "new version": "8.0.43-34.1"} 2025-10-15T11:50:16.511Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9771c132-7100-4ee4-a5da-7d3484fd2b2d"} 2025-10-15T11:50:21.320Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "881d485d-e928-4610-8508-414577073d02"} 2025-10-15T11:50:26.612Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "2f28974e-2de6-4191-a5fc-dbc3474ffed0"} 2025-10-15T11:50:32.218Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "08bbafe1-da5f-448d-9a05-3f932c5006ed"} 2025-10-15T11:50:37.419Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "09399911-5a75-4d21-a411-616a7bfe8590"} 2025-10-15T11:50:42.820Z DEBUG PXC 
users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a1c6c6dc-590f-459d-8e11-9404c8159884"} 2025-10-15T11:50:48.058Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "d0a2e7f0-9c38-4df9-a9c1-84163ae9b4f5"} 2025-10-15T11:50:53.260Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "e9ddb007-91ed-45a4-81c0-288bc4cdfc8a"} 2025-10-15T11:50:58.481Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "ae73d218-ced6-4b03-86ca-ebf1a9b221b6", "error": "syncusers: ERROR (line:1301) : Could not find any nodes belonging to the cluster with writer hostgroup:11\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "syncusers: ERROR (line:1301) : Could not find any nodes belonging to the cluster with writer hostgroup:11\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:969\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-15T11:51:03.947Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "8106889c-4352-402f-9034-cb32ead49d10"} 2025-10-15T11:51:09.351Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "71a5fa4a-8cb3-4154-ac08-07f7418a7c34"} 2025-10-15T11:51:15.321Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "904f8828-3e5c-4419-ba16-02dc4eedbbd0"} 2025-10-15T11:51:20.244Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "e9352adc-7c92-43e8-be14-d63709ead1d7"} 2025-10-15T11:51:25.650Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "3edf4abd-78f1-4946-b7e9-02628d589ffe"} 2025-10-15T11:51:31.019Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "68023039-13c4-4590-ac9c-b0cef7b46e5a"} 2025-10-15T11:51:35.499Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "c1fe3597-1994-4c7c-9ede-885c551bf72f", "user": "root"} 2025-10-15T11:51:35.520Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "c1fe3597-1994-4c7c-9ede-885c551bf72f", "user": "root"} 2025-10-15T11:51:35.538Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "c1fe3597-1994-4c7c-9ede-885c551bf72f", "secret": "some-name-mysql-init", "user": "root"} 2025-10-15T11:51:36.776Z DEBUG PXC users synced with ProxySQL {"controller": 
"pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "dbcb4209-5d90-40ea-977d-9be3e4aaba94"} 2025-10-15T11:51:37.703Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "c1fe3597-1994-4c7c-9ede-885c551bf72f"} 2025-10-15T11:51:37.726Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "c1fe3597-1994-4c7c-9ede-885c551bf72f", "user": "root"} 2025-10-15T11:51:37.745Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "c1fe3597-1994-4c7c-9ede-885c551bf72f", "user": "root"} 2025-10-15T11:51:39.541Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "c1fe3597-1994-4c7c-9ede-885c551bf72f"} 2025-10-15T11:51:41.721Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a2419a8a-d9c2-4e11-b7e1-219d350c57dd"} 2025-10-15T11:51:47.212Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b111e39b-5dfa-4db7-8323-5c5933148b4f"} 2025-10-15T11:51:52.521Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "c670a738-7831-4bcf-b15e-bf6bcae0258d"} 2025-10-15T11:51:54.173Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a5111241-11b8-4e99-bb7b-a75c3eae1e81", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-15T11:51:54.241Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a5111241-11b8-4e99-bb7b-a75c3eae1e81", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-15T11:51:57.240Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a5111241-11b8-4e99-bb7b-a75c3eae1e81", "error": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-15T11:52:17.094Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "293ea0d4-7ebb-49d4-b6e1-5636c685890b", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has 
not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-15T11:52:18.809Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "ec27076f-3cba-4a29-9bac-4861145fd72b", "user": "proxyadmin"} 2025-10-15T11:52:18.809Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "ec27076f-3cba-4a29-9bac-4861145fd72b", "user": "proxyadmin"} 2025-10-15T11:52:18.836Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "ec27076f-3cba-4a29-9bac-4861145fd72b", "user": "proxyadmin"} 2025-10-15T11:52:18.863Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "ec27076f-3cba-4a29-9bac-4861145fd72b", "user": "proxyadmin"} 2025-10-15T11:52:18.863Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "ec27076f-3cba-4a29-9bac-4861145fd72b", "last-applied-secret": "90a066ca6a91b3ea98b7f8501957d903b2750e62b4a91b4b65ef9d1c58628b46"} 2025-10-15T11:52:18.869Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "ec27076f-3cba-4a29-9bac-4861145fd72b", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-15T11:52:20.261Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "5e69c01f-6b6a-44c8-8c0e-eadc8f684714", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-15T11:53:03.503Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "157c7402-7f8e-456f-aea3-7859a96fb7e0"} 2025-10-15T11:53:07.996Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "6a02e2d4-1797-4efe-b080-a6856f5da51c", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-15T11:53:08.044Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "6a02e2d4-1797-4efe-b080-a6856f5da51c", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-15T11:53:09.504Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "7f84258d-637c-4772-919a-08026753df17", "error": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-2\" not found / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-2\" not found / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-15T11:53:09.998Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "92c0e69d-9a4b-4853-b032-24d8a858bee2", "user": "xtrabackup"} 2025-10-15T11:53:10.010Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "92c0e69d-9a4b-4853-b032-24d8a858bee2", "user": "xtrabackup"} 2025-10-15T11:53:10.041Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "92c0e69d-9a4b-4853-b032-24d8a858bee2", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-10-15T11:53:10.060Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "92c0e69d-9a4b-4853-b032-24d8a858bee2", "user": "xtrabackup"} 2025-10-15T11:53:10.072Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "92c0e69d-9a4b-4853-b032-24d8a858bee2", "user": "xtrabackup"} 2025-10-15T11:53:10.079Z INFO PXC pods will be 
restarted {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "92c0e69d-9a4b-4853-b032-24d8a858bee2", "last-applied-secret": "defce13451be7246a43833a3de40950fd1dcd0fed05db48dcb332931d21af1f5"} 2025-10-15T11:53:10.082Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "92c0e69d-9a4b-4853-b032-24d8a858bee2", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-15T11:53:12.365Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "5b3d2484-4c84-4460-aab1-d4bb7b70a476", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-2: dial tcp: lookup some-name-pxc-2.some-name-pxc.users-29404 on 34.118.224.10:53: no such host"} 2025-10-15T11:53:12.654Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "92c0e69d-9a4b-4853-b032-24d8a858bee2"} 2025-10-15T11:53:58.630Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "59895ad1-1b34-4971-838d-72cf579fd9d1", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-29404 on 34.118.224.10:53: no such host"} 2025-10-15T11:54:08.841Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "7eb1d5a9-955c-4ecf-9ab2-6394ef8d48f8", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.235.241.70:33062: connect: connection refused"} 2025-10-15T11:54:46.229Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "5f2e6089-7c12-4439-9fa3-c6806e3d2c9a", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-29404 on 34.118.224.10:53: no such host"} 2025-10-15T11:54:46.520Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "97dc8bdb-5c6d-408e-a364-b5d04cf9c6dc", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-29404 on 34.118.224.10:53: no such host"} 2025-10-15T11:54:51.507Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "5307141d-95d0-4e4d-90cf-d4a300c14e48", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-29404 on 34.118.224.10:53: no such host"} 2025-10-15T11:54:56.773Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "cacc88c8-3a3d-45e8-ac92-bd73d388537e", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-29404 on 34.118.224.10:53: no such host"} 2025-10-15T11:55:01.940Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "5ee3694a-ef5f-4029-be5a-4164f5ad97d7", "primary name": "some-name-pxc-0.some-name-pxc.users-29404.svc.cluster.local"} 2025-10-15T11:55:07.158Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "12a21d14-2eae-4687-a3ce-a1ed9461a7ac", "primary name": "some-name-pxc-0.some-name-pxc.users-29404.svc.cluster.local"} 2025-10-15T11:55:12.300Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "72b5d8ae-edbc-47e4-a8d1-c7f6515cb3ed", "primary name": "some-name-pxc-0.some-name-pxc.users-29404.svc.cluster.local"} 2025-10-15T11:55:17.459Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "ed8819c2-e579-41b6-a967-7b381850d954", "primary name": "some-name-pxc-0.some-name-pxc.users-29404.svc.cluster.local"} 2025-10-15T11:55:22.600Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "ac9d481a-e13c-45a0-9a86-8d6ad1fbbd79", "primary name": "some-name-pxc-0.some-name-pxc.users-29404.svc.cluster.local"} 2025-10-15T11:55:27.757Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "e69fb6be-7bac-44c3-b706-c29b9c4f275c", "primary name": "some-name-pxc-0.some-name-pxc.users-29404.svc.cluster.local"} 2025-10-15T11:55:35.664Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "0a637d50-cb51-457c-9650-41e1724e7f01"} 2025-10-15T11:55:40.146Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "833ec11b-947f-4fe7-8fa0-56d23588aff3", "user": "monitor"} 2025-10-15T11:55:40.158Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "833ec11b-947f-4fe7-8fa0-56d23588aff3", "user": "monitor"} 2025-10-15T11:55:40.175Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "833ec11b-947f-4fe7-8fa0-56d23588aff3", "secret": "some-name-mysql-init", "user": "monitor"} 2025-10-15T11:55:40.227Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "833ec11b-947f-4fe7-8fa0-56d23588aff3", "user": "monitor"} 2025-10-15T11:55:40.247Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "833ec11b-947f-4fe7-8fa0-56d23588aff3", "user": "monitor"} 2025-10-15T11:55:40.547Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "833ec11b-947f-4fe7-8fa0-56d23588aff3", "last-applied-secret": "71645309d451685ee3bec1527364104113e8919fdeb42134fa372bdaf041549a"} 2025-10-15T11:55:40.551Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-29404", "name": 
"some-name", "reconcileID": "833ec11b-947f-4fe7-8fa0-56d23588aff3", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-15T11:55:41.674Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "5275bb3d-7f78-4942-a3dc-fe81e091b7fa", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-29404.svc.cluster.local:3306) to ProxySQL\nRemoving existing user from ProxySQL: monitor\nAdding user to ProxySQL: monitor\n Added query rule for user: monitor\n / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-29404.svc.cluster.local:3306) to ProxySQL\nRemoving existing user from ProxySQL: monitor\nAdding user to ProxySQL: monitor\n Added query rule for user: monitor\n / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-15T11:56:37.892Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "d625bab1-5fcc-4dc5-8bbb-c6b2304824df", "user": "monitor"} 2025-10-15T11:56:40.601Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "d625bab1-5fcc-4dc5-8bbb-c6b2304824df"} 2025-10-15T11:56:42.883Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "4e6a48c6-c10c-4b29-880b-de63dc6b6a7a", "user": "monitor"} 2025-10-15T11:56:45.470Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "4e6a48c6-c10c-4b29-880b-de63dc6b6a7a"} 2025-10-15T11:56:48.519Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "0368284c-966d-416e-bd73-e7458848e942", "user": "monitor"} 2025-10-15T11:56:49.400Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "0368284c-966d-416e-bd73-e7458848e942", "user": "monitor"} 2025-10-15T11:56:49.415Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "0368284c-966d-416e-bd73-e7458848e942", "last-applied-secret": "71645309d451685ee3bec1527364104113e8919fdeb42134fa372bdaf041549a"} 2025-10-15T11:56:51.276Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "0368284c-966d-416e-bd73-e7458848e942"} 2025-10-15T11:56:56.644Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": 
"some-name", "reconcileID": "d7b8e80a-5fd1-4b10-a002-6d8592f31908"} 2025-10-15T11:57:01.960Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b6cb50ac-e99c-4331-89b7-4c7accecbe74"} 2025-10-15T11:57:07.382Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "3fd35b95-b76b-44e8-8fca-84798c55e1e3"} 2025-10-15T11:57:12.553Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2e127c5-3173-4407-9c57-2ba4d78c0e29"} 2025-10-15T11:57:12.951Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a383940a-0221-4fe9-bbc6-81aa0bf4e524", "user": "operator"} 2025-10-15T11:57:12.963Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a383940a-0221-4fe9-bbc6-81aa0bf4e524", "user": "operator"} 2025-10-15T11:57:12.988Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a383940a-0221-4fe9-bbc6-81aa0bf4e524", "secret": "some-name-mysql-init", "user": "operator"} 2025-10-15T11:57:13.010Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a383940a-0221-4fe9-bbc6-81aa0bf4e524", "user": "operator"} 2025-10-15T11:57:13.024Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a383940a-0221-4fe9-bbc6-81aa0bf4e524", "user": "operator"} 2025-10-15T11:57:13.041Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a383940a-0221-4fe9-bbc6-81aa0bf4e524", "last-applied-secret": "db9420354525565efff4b67933e4c7d0bf9b2a0b26031d6d54e4184fd81cec0a"} 2025-10-15T11:57:13.045Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a383940a-0221-4fe9-bbc6-81aa0bf4e524", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-15T11:57:16.742Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a383940a-0221-4fe9-bbc6-81aa0bf4e524", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' 
(using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' 
(using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-15T11:57:33.493Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "6409e445-bd23-476e-9850-2dd786d448ba"} 2025-10-15T11:57:37.697Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "3425b672-ccc8-4ab3-90b4-c11975bb8806"} 2025-10-15T11:57:52.369Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "secrets": "my-cluster-secrets-2"} 2025-10-15T11:57:52.376Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "user": "root"} 2025-10-15T11:57:52.396Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "user": "root"} 2025-10-15T11:57:52.416Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "secret": "some-name-mysql-init", "user": "root"} 2025-10-15T11:57:54.632Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d"} 2025-10-15T11:57:54.655Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "user": "root"} 2025-10-15T11:57:54.676Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "user": "root"} 2025-10-15T11:57:54.684Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "user": "operator"} 2025-10-15T11:57:54.698Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "user": "operator"} 2025-10-15T11:57:54.720Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", 
"reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "secret": "some-name-mysql-init", "user": "operator"} 2025-10-15T11:57:54.740Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "user": "operator"} 2025-10-15T11:57:54.755Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "user": "operator"} 2025-10-15T11:57:54.764Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "user": "monitor"} 2025-10-15T11:57:54.780Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "user": "monitor"} 2025-10-15T11:57:54.810Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "secret": "some-name-mysql-init", "user": "monitor"} 2025-10-15T11:57:54.827Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "user": "monitor"} 2025-10-15T11:57:54.848Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "user": "monitor"} 2025-10-15T11:57:55.149Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "user": "xtrabackup"} 2025-10-15T11:57:55.163Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "user": "xtrabackup"} 2025-10-15T11:57:55.181Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-10-15T11:57:55.209Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "user": "xtrabackup"} 2025-10-15T11:57:55.223Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "user": "xtrabackup"} 2025-10-15T11:57:55.228Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "user": "replication"} 2025-10-15T11:57:55.238Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "user": "replication"} 2025-10-15T11:57:55.258Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "secret": "some-name-mysql-init", "user": "replication"} 2025-10-15T11:57:55.276Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": 
"b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "user": "replication"} 2025-10-15T11:57:55.290Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "user": "replication"} 2025-10-15T11:57:55.290Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "user": "proxyadmin"} 2025-10-15T11:57:55.307Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "user": "proxyadmin"} 2025-10-15T11:57:55.335Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "user": "proxyadmin"} 2025-10-15T11:57:55.335Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "last-applied-secret": "5aef372a93f3aef2c4a58562e45ae02f81aea3de6f89fc2836afce9792e1337e"} 2025-10-15T11:57:55.335Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "last-applied-secret": "5aef372a93f3aef2c4a58562e45ae02f81aea3de6f89fc2836afce9792e1337e"} 2025-10-15T11:57:55.338Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-15T11:57:55.386Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-15T11:57:57.350Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b2cd2060-8214-46a8-87bf-5c01dc6eef0d", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-15T11:58:45.077Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "cb0b1f37-1ca4-4dd9-8034-b02f311c7f00", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-29404 on 34.118.224.10:53: no such host"} 2025-10-15T11:58:50.049Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "c716d11d-8bf4-4bff-9e26-65964a9abdda", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-29404 on 34.118.224.10:53: no such host"} 2025-10-15T11:58:55.331Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "176b2935-0325-48d3-8466-7c5dc9ef9972", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-29404 on 34.118.224.10:53: no such host"} 2025-10-15T11:59:37.228Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "4e1513a3-8834-4d2d-b2a5-a7f70f18ce97", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-29404 on 34.118.224.10:53: no such host"} 2025-10-15T11:59:42.553Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "4c9137e7-0579-425f-a89d-b8d3cf3ef59f", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-29404 on 34.118.224.10:53: no such host"} 2025-10-15T11:59:47.722Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a604e355-9149-4d6c-aa40-283147d7ceab", "primary name": "some-name-pxc-0.some-name-pxc.users-29404.svc.cluster.local"} 2025-10-15T11:59:58.067Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "810162bb-d2c7-4958-8e0e-a590caf6fb47", "primary name": "some-name-pxc-0.some-name-pxc.users-29404.svc.cluster.local"} 2025-10-15T12:00:08.376Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "c7c632fa-c714-455f-8ad7-0ba26757adff", "primary name": "some-name-pxc-0.some-name-pxc.users-29404.svc.cluster.local"} 2025-10-15T12:00:13.532Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "66fc95ed-0141-49a1-b77f-6fda201c9816", "primary name": "some-name-pxc-0.some-name-pxc.users-29404.svc.cluster.local"} 2025-10-15T12:00:18.673Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b1502974-a6a1-47ea-8ac4-acb7272e870e", "primary name": "some-name-pxc-0.some-name-pxc.users-29404.svc.cluster.local"} 2025-10-15T12:00:23.843Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "8ca7cfa8-d413-4f42-92ec-e3fe4250bc39", "primary name": "some-name-pxc-0.some-name-pxc.users-29404.svc.cluster.local"} 2025-10-15T12:00:30.198Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "e3da14b9-2967-4848-84b9-8dc7c600214f", "user": "monitor"} 2025-10-15T12:00:31.116Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "e3da14b9-2967-4848-84b9-8dc7c600214f", "user": "monitor"} 2025-10-15T12:00:31.132Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "e3da14b9-2967-4848-84b9-8dc7c600214f", "last-applied-secret": "5aef372a93f3aef2c4a58562e45ae02f81aea3de6f89fc2836afce9792e1337e"} 2025-10-15T12:00:32.991Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "e3da14b9-2967-4848-84b9-8dc7c600214f"} 2025-10-15T12:00:37.113Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a110f2ba-b418-49a1-9d58-427664925c57", "user": "operator"} 2025-10-15T12:00:37.127Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a110f2ba-b418-49a1-9d58-427664925c57", "user": "operator"} 2025-10-15T12:00:37.146Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a110f2ba-b418-49a1-9d58-427664925c57", "secret": "some-name-mysql-init", "user": "operator"} 2025-10-15T12:00:37.194Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a110f2ba-b418-49a1-9d58-427664925c57", "user": "operator"} 2025-10-15T12:00:37.206Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a110f2ba-b418-49a1-9d58-427664925c57", "user": "operator"} 2025-10-15T12:00:37.223Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a110f2ba-b418-49a1-9d58-427664925c57", "last-applied-secret": "302d5afc2c91730c8417027692013d3681f7c4df13f10d2cf3357cc422522118"} 2025-10-15T12:00:37.230Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "a110f2ba-b418-49a1-9d58-427664925c57", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-15T12:00:38.101Z ERROR sync 
users {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "2b29fbb1-e6ab-45a8-8655-51338febe550", "error": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-1\" not found / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-29404.svc.cluster.local:3306) to ProxySQL\n / ERROR (line:1301) : Could not find any nodes belonging to the cluster with writer hostgroup:11\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR (line:359) : Server connection check failed. \n-- Could not connect to the server at some-name-pxc-0.some-name-pxc.users-29404.svc.cluster.local:3306 \n-- Please check the connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-1\" not found / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-29404.svc.cluster.local:3306) to ProxySQL\n / ERROR (line:1301) : Could not find any nodes belonging to the cluster with writer hostgroup:11\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-29404.svc.' (using password: YES)\nERROR (line:359) : Server connection check failed. \n-- Could not connect to the server at some-name-pxc-0.some-name-pxc.users-29404.svc.cluster.local:3306 \n-- Please check the connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-15T12:01:19.098Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "4f6415bf-8a86-4f0c-a51b-88ce1fefdcda"} 2025-10-15T12:01:23.566Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "aec97c4f-5a41-4137-995e-89a038f93e74"} 2025-10-15T12:01:28.962Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "2e515647-3eb4-431c-84e1-f583361a6702"} 2025-10-15T12:01:34.266Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "7a2d41c6-ffaf-412c-8f7a-2936f9e33506"} 2025-10-15T12:01:39.496Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "f24de684-0d45-470a-a4d0-85a692df68ad"} 2025-10-15T12:01:45.360Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "4b8d2996-dc4b-48ce-b7b9-f5cda7d19564"} 2025-10-15T12:01:49.767Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "f0b5763a-d65b-45be-8cab-07ced721ad35"} 2025-10-15T12:01:56.067Z 
DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "c2a69260-9f0c-4c68-ad18-84243f3836cd"} 2025-10-15T12:02:01.560Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "2c8c22e7-5b73-48c1-bb82-cf55ad5e0450"} 2025-10-15T12:02:06.966Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "90f1f9d5-3c37-40c5-ad80-2c17db2d0424"} 2025-10-15T12:02:12.160Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "4b4f1f1e-76f6-4e29-9437-a1e2e149e9d8"} 2025-10-15T12:02:17.658Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "e91c195f-4e8b-4c91-aa2c-40878bdfbdc8"} 2025-10-15T12:02:23.005Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "28fc8594-5e48-46d7-945d-657fa464adae"} 2025-10-15T12:02:28.252Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "19a84d45-cd76-4601-b6a8-f4270fd863fc"} 2025-10-15T12:02:33.571Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "1037589a-ae7d-4ca8-a7e7-d43d88de393a"} 2025-10-15T12:02:34.425Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9b46d253-30fb-430f-8d21-7d28f579be48", "user": "root"} 2025-10-15T12:02:34.442Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9b46d253-30fb-430f-8d21-7d28f579be48", "user": "root"} 2025-10-15T12:02:34.466Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9b46d253-30fb-430f-8d21-7d28f579be48", "secret": "some-name-mysql-init", "user": "root"} 2025-10-15T12:02:36.787Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9b46d253-30fb-430f-8d21-7d28f579be48"} 2025-10-15T12:02:36.830Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9b46d253-30fb-430f-8d21-7d28f579be48", "user": "root"} 2025-10-15T12:02:36.850Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9b46d253-30fb-430f-8d21-7d28f579be48", "user": "root"} 2025-10-15T12:02:36.872Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9b46d253-30fb-430f-8d21-7d28f579be48", "user": "monitor"} 2025-10-15T12:02:36.884Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9b46d253-30fb-430f-8d21-7d28f579be48", "user": "monitor"} 2025-10-15T12:02:36.904Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9b46d253-30fb-430f-8d21-7d28f579be48", "secret": "some-name-mysql-init", "user": "monitor"} 2025-10-15T12:02:36.922Z INFO Proxy user updated 
{"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9b46d253-30fb-430f-8d21-7d28f579be48", "user": "monitor"} 2025-10-15T12:02:36.943Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9b46d253-30fb-430f-8d21-7d28f579be48", "user": "monitor"} 2025-10-15T12:02:37.237Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9b46d253-30fb-430f-8d21-7d28f579be48", "user": "xtrabackup"} 2025-10-15T12:02:37.253Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9b46d253-30fb-430f-8d21-7d28f579be48", "user": "xtrabackup"} 2025-10-15T12:02:37.272Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9b46d253-30fb-430f-8d21-7d28f579be48", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-10-15T12:02:37.294Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9b46d253-30fb-430f-8d21-7d28f579be48", "user": "xtrabackup"} 2025-10-15T12:02:37.308Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9b46d253-30fb-430f-8d21-7d28f579be48", "user": "xtrabackup"} 2025-10-15T12:02:37.315Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9b46d253-30fb-430f-8d21-7d28f579be48", "user": "proxyadmin"} 2025-10-15T12:02:37.333Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9b46d253-30fb-430f-8d21-7d28f579be48", "user": "proxyadmin"} 2025-10-15T12:02:37.360Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9b46d253-30fb-430f-8d21-7d28f579be48", "user": "proxyadmin"} 2025-10-15T12:02:37.360Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9b46d253-30fb-430f-8d21-7d28f579be48", "last-applied-secret": "2a1202de195058e636cc72e1bf4e2fc68c5de2486f4745969e3d2c6f1b0374ee"} 2025-10-15T12:02:37.360Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9b46d253-30fb-430f-8d21-7d28f579be48", "last-applied-secret": "2a1202de195058e636cc72e1bf4e2fc68c5de2486f4745969e3d2c6f1b0374ee"} 2025-10-15T12:02:37.363Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9b46d253-30fb-430f-8d21-7d28f579be48", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-15T12:02:37.457Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9b46d253-30fb-430f-8d21-7d28f579be48", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-15T12:02:39.262Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9b46d253-30fb-430f-8d21-7d28f579be48", "error": "exec syncusers: failed to execute command in pod: 
command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-15T12:02:55.951Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "7d0ebf8c-4d09-4802-8e4f-c75dbbf77b15", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-15T12:02:56.003Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "7d0ebf8c-4d09-4802-8e4f-c75dbbf77b15", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-15T12:02:56.097Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "7d0ebf8c-4d09-4802-8e4f-c75dbbf77b15", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-15T12:02:56.171Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "7d0ebf8c-4d09-4802-8e4f-c75dbbf77b15", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-15T12:02:56.247Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "7d0ebf8c-4d09-4802-8e4f-c75dbbf77b15", "object": "some-name-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-15T12:02:56.986Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "f4beb77b-8bdd-4dc7-a23c-c62911ed943c", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-10-15T12:05:53.632Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", "user": "root"} 2025-10-15T12:05:53.652Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", "user": "root"} 2025-10-15T12:05:53.671Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", "secret": "some-name-mysql-init", "user": "root"} 2025-10-15T12:05:53.695Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", "user": "root"} 2025-10-15T12:05:53.711Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", "user": "root"} 2025-10-15T12:05:53.718Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", 
"reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", "user": "operator"} 2025-10-15T12:05:53.728Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", "user": "operator"} 2025-10-15T12:05:53.746Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", "secret": "some-name-mysql-init", "user": "operator"} 2025-10-15T12:05:53.766Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", "user": "operator"} 2025-10-15T12:05:53.778Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", "user": "operator"} 2025-10-15T12:05:53.784Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", "user": "monitor"} 2025-10-15T12:05:53.798Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", "user": "monitor"} 2025-10-15T12:05:53.820Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", "secret": "some-name-mysql-init", "user": "monitor"} 2025-10-15T12:05:53.844Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", "user": "monitor"} 2025-10-15T12:05:54.149Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", "user": "xtrabackup"} 2025-10-15T12:05:54.161Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", "user": "xtrabackup"} 2025-10-15T12:05:54.183Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-10-15T12:05:54.205Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", "user": "xtrabackup"} 2025-10-15T12:05:54.216Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", "user": "xtrabackup"} 2025-10-15T12:05:54.223Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", "user": "replication"} 2025-10-15T12:05:54.234Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", "user": "replication"} 2025-10-15T12:05:54.251Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", 
"secret": "some-name-mysql-init", "user": "replication"} 2025-10-15T12:05:54.271Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", "user": "replication"} 2025-10-15T12:05:54.283Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", "user": "replication"} 2025-10-15T12:05:54.283Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", "last-applied-secret": "db9420354525565efff4b67933e4c7d0bf9b2a0b26031d6d54e4184fd81cec0a"} 2025-10-15T12:05:54.286Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "83934c13-0633-48e0-be51-b35ac550afea", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-15T12:07:28.073Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9549a798-e448-4086-af3e-b7bb7f93179d", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-29404 on 34.118.224.10:53: no such host"} 2025-10-15T12:07:28.437Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b14cb295-73f6-423c-971f-7fe4ce2ab7af", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-29404 on 34.118.224.10:53: no such host"} 2025-10-15T12:07:33.395Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "3717806e-9203-4344-a51d-a0b8b6097e08", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-29404 on 34.118.224.10:53: no such host"} 2025-10-15T12:08:16.123Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "57cf4f26-786e-4e17-aa3e-91ce98d1be40", "user": "monitor"} 2025-10-15T12:08:17.029Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "57cf4f26-786e-4e17-aa3e-91ce98d1be40", "user": "monitor"} 2025-10-15T12:08:22.493Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "6e3c84c1-bdb0-45e2-a2ab-1662b3f2f326", "user": "monitor"} 2025-10-15T12:08:22.508Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "6e3c84c1-bdb0-45e2-a2ab-1662b3f2f326", "user": "monitor"} 2025-10-15T12:08:22.524Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "6e3c84c1-bdb0-45e2-a2ab-1662b3f2f326", "secret": "some-name-mysql-init", "user": "monitor"} 2025-10-15T12:08:22.541Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "6e3c84c1-bdb0-45e2-a2ab-1662b3f2f326", "user": "monitor"} 2025-10-15T12:08:26.463Z INFO Password updated but old one not discarded {"controller": 
"pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "b8972135-ee71-4ca6-ac46-de65d6a624c6", "user": "monitor"} 2025-10-15T12:08:32.020Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "f7443f99-5893-4aca-974b-f13071a47a10", "user": "monitor"} 2025-10-15T12:08:37.629Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "ad7ee46d-30b2-45e8-b386-f46776c18779", "user": "monitor"} 2025-10-15T12:08:43.231Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "9f432b94-4bba-4d33-a1c8-c23e0fc0d544", "user": "monitor"} 2025-10-15T12:08:48.819Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-29404", "name": "some-name", "reconcileID": "d508c4b1-2aba-4aa2-ab5f-85ce2436ef9a", "user": "monitor"} ... // 22 identical fields + "2a1202de195058e636cc72e1bf4e2fc68c5de2486f4745969e3d2c6f1b0374e", - "2a1202de195058e636cc72e1bf4e2fc68c5de2486f4745969e3d2c6f1b0374ee", ... // 2 identical fields ... // 2 identical fields ... // 2 identical fields - "30", + "302d5afc2c91730c8417027692013d3681f7c4df13f10d2cf3357cc422522118", ... // 3 identical elements ... // 3 identical fields ... // 3 identical fields ... // 3 identical fields ... // 4 identical fields - "5aef372a93f3aef2c4a58562e45ae02f81aea3de6f89fc2836afce9792e1337", - "5aef372a93f3aef2c4a58562e45ae02f81aea3de6f89fc2836afce9792e1337e", + "5aef372a93f3aef2c4a58562e45ae02f81aea3de6f89fc2836afce9792e1337e", ... // 5 identical fields ... // 5 identical fields ... // 5 identical fields ... // 6 identical fields ... // 6 identical fields - "71645309d451685ee3bec1527364104113e8919fdeb42134fa372bdaf041549", + "71645309d451685ee3bec1527364104113e8919fdeb42134fa372bdaf041549a", ... // 7 identical fields ... // 8 identical fields - "90a066ca6a91b3ea98b7f8501957d903b2750e62b4a91b4b65ef9d1c58628b46", ... // 9 identical fields ... 
// 9 identical fields "a", + "a1202de195058e636cc72e1bf4e2fc68c5de2486f4745969e3d2c6f1b0374ee", AccessModes: nil, ActiveDeadlineSeconds: nil, Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}}, Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}}, Annotations: map[string]string{ - Annotations: map[string]string{ + Annotations: map[string]string{ + APIVersion: "", - APIVersion: "apps/v1", - APIVersion: "apps/v1", - APIVersion: "v1", Args: {"mysqld"}, Args: {"proxysql", "-f", "-c", "/etc/proxysql/proxysql.cnf", ...}, - Args: []string{"logrotate"}, AutomountServiceAccountToken: nil, + AvailableReplicas: 0, - AvailableReplicas: 2, - AvailableReplicas: 3, AWSElasticBlockStore: nil, AzureFile: nil, Capacity: nil, - CollisionCount: &0, + CollisionCount: nil, Conditions: nil, ConfigMapKeyRef: nil, ConfigMap: &v1.ConfigMapVolumeSource{ ContainerPort: 3306, ContainerPort: 33060, ContainerPort: 33062, ContainerPort: 4444, ContainerPort: 4567, ContainerPort: 4568, ContainerPort: 6032, ContainerPort: 6070, Containers: []v1.Container{ + CreationTimestamp: v1.Time{}, - CreationTimestamp: v1.Time{Time: s"2025-10-15 11:46:40 +0000 UTC"}, + CurrentReplicas: 0, - CurrentReplicas: 2, - CurrentReplicas: 3, + CurrentRevision: "", - CurrentRevision: "some-name-proxysql-58f7f4b56d", - CurrentRevision: "some-name-proxysql-649d9994d", - CurrentRevision: "some-name-proxysql-667bdf5c47", - CurrentRevision: "some-name-proxysql-7666bd7cb5", - CurrentRevision: "some-name-proxysql-7b8ddbc4f5", - CurrentRevision: "some-name-proxysql-9df7669d7", - CurrentRevision: "some-name-pxc-55d6545c5", - CurrentRevision: "some-name-pxc-6688849885", - CurrentRevision: "some-name-pxc-749f7446b6", - CurrentRevision: "some-name-pxc-76d958846f", - "d5afc2c91730c8417027692013d3681f7c4df13f10d2cf3357cc422522118", DataSource: nil, DataSourceRef: nil, + "db9420354525565efff4b67933e4c7d0bf9b2a0b26031d6d54e4184fd81cec0", - "db9420354525565efff4b67933e4c7d0bf9b2a0b26031d6d54e4184fd81cec0a", + "db9420354525565efff4b67933e4c7d0bf9b2a0b26031d6d54e4184fd81cec0a", - DefaultMode: &420, - DefaultMode: &420, + DefaultMode: nil, + DefaultMode: nil, - "defce13451be7246a43833a3de40950fd1dcd0fed05db48dcb332931d21af1f5", DeletionGracePeriodSeconds: nil, DeletionGracePeriodSeconds: nil, DeletionTimestamp: nil, + DeprecatedServiceAccount: "", - DeprecatedServiceAccount: "default", + DNSPolicy: "", - DNSPolicy: "ClusterFirst", "e", EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-proxysql"}, Optional: &true}}}, EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-pxc"}, Optional: &true}}}, EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-log-collector"}, Optional: &true}}}, - EnvFrom: []v1.EnvFromSource{{SecretRef: s"&SecretEnvSource{LocalObjectRefe"...}}, Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "OPERATOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: 
"internal-some-name"}, Key: "operator"}}}, {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "proxyadmin"}}}, ...}, Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, ...}, Env: []v1.EnvVar{ - Env: []v1.EnvVar{ EphemeralContainers: nil, FailureThreshold: 3, FC: nil, FieldPath: "metadata.name", FieldPath: "metadata.namespace", FieldRef: &v1.ObjectFieldSelector{ - FieldsType: "FieldsV1", - FieldsType: "FieldsV1", - FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`..., - FieldsV1: s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`..., Finalizers: nil, Finalizers: nil, + Generation: 0, - Generation: 1, - Generation: 2, - Generation: 3, - Generation: 4, - Generation: 5, - Generation: 6, - Generation: 7, - Generation: 8, github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 GitRepo: nil, /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:474 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:869 HostAliases: nil, HostIP: "", HostPort: 0, - Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", - Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", ImagePullPolicy: "Always", - ImagePullPolicy: "Always", InitContainers: []v1.Container{ InitialDelaySeconds: 300, ISCSI: nil, Items: nil, Items: nil, "kubectl.kubernetes.io/default-container": "proxysql", "kubectl.kubernetes.io/default-container": "pxc", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Labels: nil, + "last-applied-secret": "90a066ca6a91b3ea98b7f8501957d903b2750e62b4a91b4b65ef9d1c58628b46", + "last-applied-secret": "defce13451be7246a43833a3de40950fd1dcd0fed05db48dcb332931d21af1f5", "last-applied-secret": strings.Join({ Lifecycle: nil, LivenessProbe: &v1.Probe{ LocalObjectReference: {Name: "auto-some-name-pxc"}, LocalObjectReference: {Name: "some-name-pxc"}, ManagedFields: nil, + ManagedFields: nil, - ManagedFields: []v1.ManagedFieldsEntry{ - Manager: "kube-controller-manager", - Manager: "percona-xtradb-cluster-operator", MinReadySeconds: 0, Name: "auto-config", {Name: "bin", VolumeSource: {EmptyDir: &{}}}, {Name: "CLUSTER_HASH", Value: "3230101"}, Name: "config", Name: "DEFAULT_AUTHENTICATION_PLUGIN", - {Name: "IS_LOGCOLLECTOR", Value: "yes"}, Name: "ist", {Name: "LIVENESS_CHECK_TIMEOUT", Value: "5"}, {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"}, - {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"}, - Name: "logrotate", - 
Name: "logs", {Name: "MONITOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "monitor"}}}, - {Name: "MONITOR_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, Name: "mysql", Name: "mysql-admin", Name: "mysql-init-file", {Name: "MYSQL_NOTIFY_SOCKET", Value: "/var/lib/mysql/notify.sock"}, {Name: "MYSQL_STATE_FILE", Value: "/var/lib/mysql/mysql.state"}, Name: "mysql-users-secret-file", Name: "mysqlx", {Name: "OPERATOR_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, Name: "POD_NAME", Name: "POD_NAMESPASE", - {Name: "POD_NAMESPASE", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, - {Name: "POD_NAME", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, Name: "proxyadm", {Name: "READINESS_CHECK_TIMEOUT", Value: "15"}, - {Name: "SERVICE_TYPE", Value: "mysql"}, Namespace: "users-29404", Name: "ssl", Name: "ssl-internal", Name: "sst", Name: "stats", {Name: "tmp", VolumeSource: {EmptyDir: &{}}}, Name: "vault-keyring-secret", Name: "write-set", {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, NFS: nil, NodeName: "", NodeSelector: nil, ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "last-applied-secret": "90a066ca6a91b3ea98b7f8501957d903b2750e62b4a91b4b65ef9d1c58628b46", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}}, ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}}, ObjectMeta: {Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "pxc", "last-applied-secret": "2a1202de195058e636cc72e1bf4e2fc68c5de2486f4745969e3d2c6f1b0374ee", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}}, ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, ObjectMeta: {Name: "proxydata", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, ObjectMeta: v1.ObjectMeta{ ObjectMeta: v1.ObjectMeta{ + ObservedGeneration: 0, - ObservedGeneration: 1, - ObservedGeneration: 2, - ObservedGeneration: 3, - ObservedGeneration: 4, - ObservedGeneration: 5, - ObservedGeneration: 6, - 
ObservedGeneration: 7, - ObservedGeneration: 8, - Operation: "Update", - Operation: "Update", Optional: &false, Optional: &true, Optional: &true, Ordinals: nil, OS: nil, Overhead: nil, OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name", UID: "1e0432c4-0db5-42cf-bff2-d29b370a0720", ...}}, OwnerReferences: nil, "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMmExMjAyZGUxOTUwNThlNjM2Y2M3MmUxYmY0ZTJmYzY4YzVkZTI0ODZmNDc0NTk2OWUzZDJjNmYxYjAzNzRlZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMzAyZDVhZmMyYzkxNzMwYzg0MTcwMjc2OTIwMTNkMzY4MWY3YzRkZjEzZjEwZDJjZjMzNTdjYzQyMjUyMjExOCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMzAyZDVhZmMyYzkxNzMwYzg0MTcwMjc2OTIwMTNkMzY4MWY3YzRkZjEzZjEwZDJjZjMzNTdjYzQyMjUyMjExOCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNWFlZjM3MmE5M2YzYWVmMmM0YTU4NTYyZTQ1YWUwMmY4MWFlYTNkZTZmODlmYzI4MzZhZmNlOTc5MmUxMzM3ZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNWFlZjM3MmE5M2YzYWVmMmM0YTU4NTYyZTQ1YWUwMmY4MWFlYTNkZTZmODlmYzI4MzZhZmNlOTc5MmUxMzM3ZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNzE2NDUzMDlkNDUxNjg1ZWUzYmVjMTUyNzM2NDEwNDExM2U4OTE5ZmRlYjQyMTM0ZmEzNzJiZGFmMDQxNTQ5YSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNzE2NDUzMDlkNDUxNjg1ZWUzYmVjMTUyNzM2NDEwNDExM2U4OTE5ZmRlYjQyMTM0ZmEzNzJiZGFmMDQxNTQ5YSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOTBhMDY2Y2E2YTkxYjNlYTk4YjdmODUwMTk1N2Q5MDNiMjc1MGU2MmI0YTkxYjRiNjVlZjlkMWM1ODYyOGI0NiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZGI5NDIwMzU0NTI1NTY1ZWZmZjRiNjc5MzNlNGM3ZDBiZjliMmEwYjI2MDMxZDZkNTRlNDE4NGZkODFjZWMwYSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZGI5NDIwMzU0NTI1NTY1ZWZmZjRiNjc5MzNlNGM3ZDBiZjliMmEwYjI2MDMxZDZkNTRlNDE4NGZkODFjZWMwYSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMmExMjAyZGUxOTUwNThlNjM2Y2M3MmUxYmY0ZTJmYzY4YzVkZTI0ODZmNDc0NTk2OWUzZDJjNmYxYjAzNzRlZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMmExMjAyZGUxOTUwNThlNjM2Y2M3MmUxYmY0ZTJmYzY4YzVkZTI0ODZmNDc0NTk2OWUzZDJjNmYxYjAzNzRlZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMmExMjAyZGUxOTUwNThlNjM2Y2M3MmUxYmY0ZTJmYzY4YzVkZTI0ODZmNDc0NTk2OWUzZDJjNmYxYjAzNzRlZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMjEzLTZjMDhlYTcxIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiO
iJsb2dzIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1sb2djb2xsZWN0b3IiLCJlbnZGcm9tIjpbeyJzZWNyZXRSZWYiOnsibmFtZSI6InNvbWUtbmFtZS1sb2ctY29sbGVjdG9yIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IkxPR19EQVRBX0RJUiIsInZhbHVlIjoiL3Zhci9saWIvbXlzcWwifSx7Im5hbWUiOiJQT0RfTkFNRVNQQVNFIiwidmFsdWVGcm9tIjp7ImZpZWxkUmVmIjp7ImZpZWxkUGF0aCI6Im1ldGFkYXRhLm5hbWVzcGFjZSJ9fX0seyJuYW1lIjoiUE9EX05BTUUiLCJ2YWx1ZUZyb20iOnsiZmllbGRSZWYiOnsiZmllbGRQYXRoIjoibWV0YWRhdGEubmFtZSJ9fX1dLCJyZXNvdXJjZXMiOnt9LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJkYXRhZGlyIiwibW91bnRQYXRoIjoiL3Zhci9saWIvbXlzcWwifV0sImltYWdlUHVsbFBvbGljeSI6IkFsd2F5cyJ9LHsibmFtZSI6ImxvZ3JvdGF0ZSIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOm1haW4tbG9nY29sbGVjdG9yIiwiYXJncyI6WyJsb2dyb3RhdGUiXSwiZW52IjpbeyJuYW1lIjoiU0VSVklDRV9UWVBFIiwidmFsdWUiOiJteXNxbCJ9LHsibmFtZSI6Ik1PTklUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJtb25pdG9yIn19fV0sInJlc291cmNlcyI6e30sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImRhdGFkaXIiLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHhjIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1weGM4LjAiLCJjb21tYW5kIjpbIi92YXIvbGliL215c3FsL3B4Yy1lbnRyeXBvaW50LnNoIl0sImFyZ3MiOlsibXlzcWxkIl0sInBvcnRzIjpbeyJuYW1lIjoibXlzcWwiLCJjb250YWluZXJQb3J0IjozMzA2fSx7Im5hbWUiOiJzc3QiLCJjb250YWluZXJQb3J0Ijo0NDQ0fSx7Im5hbWUiOiJ3cml0ZS1zZXQiLCJjb250YWluZXJQb3J0Ijo0NTY3fSx7Im5hbWUiOiJpc3QiLCJjb250YWluZXJQb3J0Ijo0NTY4fSx7Im5hbWUiOiJteXNxbC1hZG1pbiIsImNvbnRhaW5lclBvcnQiOjMzMDYyfSx7Im5h"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMmExMjAyZGUxOTUwNThlNjM2Y2M3MmUxYmY0ZTJmYzY4YzVkZTI0ODZmNDc0NTk2OWUzZDJjNmYxYjAzNzRlZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5
hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMjEzLTZjMDhlYTcxIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXB4YzguMCIsImNvbW1hbmQiOlsiL3Zhci9saWIvbXlzcWwvcHhjLWVudHJ5cG9pbnQuc2giXSwiYXJncyI6WyJteXNxbGQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InNzdCIsImNvbnRhaW5lclBvcnQiOjQ0NDR9LHsibmFtZSI6IndyaXRlLXNldCIsImNvbnRhaW5lclBvcnQiOjQ1Njd9LHsibmFtZSI6ImlzdCIsImNvbnRhaW5lclBvcnQiOjQ1Njh9LHsibmFtZSI6Im15c3FsLWFkbWluIiwiY29udGFpbmVyUG9ydCI6MzMwNjJ9LHsibmFtZSI6Im15c3FseCIsImNvbnRhaW5lclBvcnQiOjMzMDYwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXB4YyIsIm9wdGlvbmFsIjp0cnVlfX1dLCJlbnYiOlt7Im5hbWUiOiJQWENfU0VSVklDRSIsInZhbHVlIjoic29tZS1uYW1lLXB4Yy11bnJlYWR5In0seyJuYW1lIjoiTU9OSVRPUl9IT1NUIiwidmFsdWUiOiIlIn0seyJuYW1lIjoiTVlTUUxfUk9PVF9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6InJvb3QifX19LHsibmFtZSI6IlhUUkFCQUNLVVBfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJ4dHJhYmFja3VwIn19fSx7Im5hbWUiOiJNT05JVE9SX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoibW9uaXRvciJ9fX0seyJuYW1lIjoiQ0xVU1RFUl9IQVNIIiwidmFsdWUiOiIzMjMwMTAxIn0seyJuYW1lIjoiT1BFUkFUT1JfQURNSU5fUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiTElWRU5FU1NfQ0hFQ0tfVElNRU9VVCIsInZhbHVlIjoiNSJ9LHsibmFtZSI6IlJFQURJTkVTU19DSEVDS19USU1FT1VUIiwidmFsdWUiOiIxNSJ9LHsibmFtZSI6IkRFRkFVTFRfQVVUSEVOVElDQVRJT05fUExVR0lOIiwidmFsdWUiOiJjYWNoaW5nX3NoYTJfcGFzc3dvcmQifSx7Im5hbWUiOiJNWVNR"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNWFlZjM3MmE5M2YzYWVmMmM0YTU4NTYyZTQ1YWUwMmY4MWFlYTNkZTZmODlmYzI4MzZhZmNlOTc5MmUxMzM3ZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNWFlZjM3MmE5M2YzYWVmMmM0YTU4NTYyZTQ1YWUwMmY4MWFlYTNkZTZmODlmYzI4MzZhZmNlOTc5MmUxMzM3ZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZGI5NDIwMzU0NTI1NTY1ZWZmZjRiNjc5MzNlNGM3ZDBiZjliMmEwYjI2MDMxZDZkNTRlNDE4NGZkODFjZWMwYSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZGVmY2UxMzQ1MWJlNzI0NmE0MzgzM2EzZGU0MDk1MGZkMWRjZDBmZWQwNWRiNDhkY2IzMzI5MzFkMjFhZjFmNSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZGVmY2UxMzQ1MWJlNzI0NmE0MzgzM2EzZGU0MDk1MGZkMWRjZDBmZWQwNWRiNDhkY2IzMzI5MzFkMjFhZjFmNSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNv"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOTBhMDY2Y2E2YTkxYjNlYTk4YjdmODUwMTk1N2Q5MDNiMjc1MGU2MmI0YTkxYjRiNjVlZjlkMWM1ODYyOGI0NiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMw"..., "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc", + PeriodSeconds: 0, - PeriodSeconds: 10, + PersistentVolumeClaimRetentionPolicy: nil, - PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", + Phase: "", - Phase: "Pending", + PodManagementPolicy: "", - PodManagementPolicy: "OrderedReady", Ports: nil, Ports: []v1.ContainerPort{ PreemptionPolicy: nil, ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}}, + Protocol: "", - Protocol: "TCP", Quobyte: nil, ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...}, + ReadyReplicas: 0, - ReadyReplicas: 2, - ReadyReplicas: 3, + Replicas: 0, Replicas: &2, - Replicas: 2, - Replicas: &2, + Replicas: &2, Replicas: &3, - Replicas: 3, - Replicas: &3, + Replicas: &3, ResizePolicy: nil, ResourceFieldRef: nil, Resources: {}, Resources: {Limits: {s"cpu": {i: {...}, s: "1", Format: "DecimalSI"}, s"memory": {i: {...}, s: "2G", Format: "DecimalSI"}}, Requests: {s"cpu": {i: {...}, s: "100m", Format: "DecimalSI"}, s"memory": {i: {...}, Format: "DecimalSI"}}}, + ResourceVersion: "", - ResourceVersion: 
"1760528834997247006", - ResourceVersion: "1760529013486735017", - ResourceVersion: "1760529131138751006", - ResourceVersion: "1760529169112911006", - ResourceVersion: "1760529189287375006", - ResourceVersion: "1760529329463231017", - ResourceVersion: "1760529386441119006", - ResourceVersion: "1760529448448911006", - ResourceVersion: "1760529499534159006", - ResourceVersion: "1760529624368911017", - ResourceVersion: "1760529674500527006", - ResourceVersion: "1760529773085119017", - ResourceVersion: "1760529950157023017", + RestartPolicy: "", - RestartPolicy: "Always", - RevisionHistoryLimit: &10, + RevisionHistoryLimit: nil, + SchedulerName: "", - SchedulerName: "default-scheduler", SecretName: "internal-some-name", SecretName: "some-name-mysql-init", SecretName: "some-name-ssl", SecretName: "some-name-ssl-internal", SecretName: "some-name-vault", Secret: &v1.SecretVolumeSource{ SecurityContext: nil, Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, SelfLink: "", ServiceAccountName: "default", ServiceName: "some-name-proxysql-unready", ServiceName: "some-name-pxc", SetHostnameAsFQDN: nil, sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1 Spec: v1.PersistentVolumeClaimSpec{ Spec: v1.PodSpec{ Spec: v1.StatefulSetSpec{ StartupProbe: nil, Status: v1.PersistentVolumeClaimStatus{ Status: v1.StatefulSetStatus{ StorageClassName: nil, Subdomain: "", - Subresource: "status", SuccessThreshold: 1, Template: v1.PodTemplateSpec{ TerminationGracePeriodSeconds: &30, TerminationGracePeriodSeconds: &600, TerminationGracePeriodSeconds: nil, + TerminationMessagePath: "", - TerminationMessagePath: "/dev/termination-log", + TerminationMessagePolicy: "", - TerminationMessagePolicy: "File", TimeoutSeconds: 5, - Time: s"2025-10-15 11:46:40 +0000 UTC", - Time: s"2025-10-15 11:47:14 +0000 UTC", - Time: s"2025-10-15 11:50:13 +0000 UTC", - Time: s"2025-10-15 11:51:54 +0000 UTC", - Time: s"2025-10-15 11:52:11 +0000 UTC", - Time: s"2025-10-15 11:52:18 +0000 UTC", - Time: s"2025-10-15 11:52:49 +0000 UTC", - Time: s"2025-10-15 11:53:08 +0000 UTC", - Time: s"2025-10-15 11:53:09 +0000 UTC", - Time: s"2025-10-15 11:53:10 +0000 UTC", - Time: s"2025-10-15 11:55:29 +0000 UTC", - Time: s"2025-10-15 11:55:40 +0000 UTC", - Time: s"2025-10-15 11:56:26 +0000 UTC", - Time: s"2025-10-15 11:57:13 +0000 UTC", - Time: s"2025-10-15 11:57:28 +0000 UTC", - Time: s"2025-10-15 11:57:55 +0000 UTC", - Time: s"2025-10-15 11:58:19 +0000 UTC", - Time: s"2025-10-15 12:00:24 +0000 UTC", - Time: s"2025-10-15 12:00:37 +0000 UTC", - Time: s"2025-10-15 12:01:14 +0000 UTC", - Time: s"2025-10-15 12:02:37 +0000 UTC", - Time: s"2025-10-15 12:02:53 +0000 UTC", - Time: s"2025-10-15 12:02:55 +0000 UTC", - Time: s"2025-10-15 12:05:50 +0000 UTC", Tolerations: nil, - TopologySpreadConstraints: nil, + TopologySpreadConstraints: []v1.TopologySpreadConstraint{}, TypeMeta: {}, TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"}, + 
UID: "", - UID: "6d518817-5588-48be-9b97-f485ecf1df94", - UID: "71f5c790-8c2e-4c2e-975e-d88565e932d9", + UpdatedReplicas: 0, - UpdatedReplicas: 1, - UpdatedReplicas: 2, - UpdatedReplicas: 3, + UpdateRevision: "", - UpdateRevision: "some-name-proxysql-58f7f4b56d", - UpdateRevision: "some-name-proxysql-649d9994d", - UpdateRevision: "some-name-proxysql-667bdf5c47", - UpdateRevision: "some-name-proxysql-7666bd7cb5", - UpdateRevision: "some-name-proxysql-7b8ddbc4f5", - UpdateRevision: "some-name-proxysql-9df7669d7", - UpdateRevision: "some-name-pxc-55d6545c5", - UpdateRevision: "some-name-pxc-5d64789d4", - UpdateRevision: "some-name-pxc-6688849885", - UpdateRevision: "some-name-pxc-749f7446b6", - UpdateRevision: "some-name-pxc-76d958846f", UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}}, &v1.StatefulSet{ Value: "", + Value: "caching_sha2_password", ValueFrom: nil, ValueFrom: &v1.EnvVarSource{ - Value: "mysql_native_password", VolumeAttributesClassName: nil, VolumeClaimTemplates: []v1.PersistentVolumeClaim{ VolumeDevices: nil, - VolumeMode: &"Filesystem", + VolumeMode: nil, VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...}, - VolumeMounts: []v1.VolumeMount{{Name: "datadir", MountPath: "/var/lib/mysql"}}, VolumeName: "", VolumeSource: v1.VolumeSource{ Volumes: []v1.Volume{ VsphereVolume: nil, WorkingDir: "", + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE + kubectl patch pxc -n users-29404 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.RdaZ4igyXs ++ mktemp + local LAST_ERR=/tmp/tmp.fNEJKRWFCD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RdaZ4igyXs perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-29404 namespace + cat /tmp/tmp.fNEJKRWFCD + rm /tmp/tmp.RdaZ4igyXs /tmp/tmp.fNEJKRWFCD + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.xR0ZBuRC7g ++ mktemp + local LAST_ERR=/tmp/tmp.v5LrB32CrV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xR0ZBuRC7g No resources found + cat /tmp/tmp.v5LrB32CrV + rm /tmp/tmp.xR0ZBuRC7g /tmp/tmp.v5LrB32CrV + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.tXM2gwArkL ++ mktemp + local LAST_ERR=/tmp/tmp.gU4tgSPfTs + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.tXM2gwArkL No resources found + cat /tmp/tmp.gU4tgSPfTs + rm /tmp/tmp.tXM2gwArkL /tmp/tmp.gU4tgSPfTs + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.mdiB6P9KYb ++ mktemp + local LAST_ERR=/tmp/tmp.ajXLrQIJLE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set 
-e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mdiB6P9KYb validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.ajXLrQIJLE + rm /tmp/tmp.mdiB6P9KYb /tmp/tmp.ajXLrQIJLE + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + rm -rf /tmp/tmp.sCrDh3TiZm + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator + kubectl_bin delete --grace-period=0 --force=true namespace users-29404 ++ mktemp ++ mktemp + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.7AEx9Eqjtb + local LAST_OUT=/tmp/tmp.6fkb14hnWM ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.LErvrGvfV2 + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.mUt37hPqSn + local exit_status=0 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-29404
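
The repeated mktemp / `seq 0 2` / `set +e` ... `set -e` blocks that dominate this trace come from a retry wrapper around kubectl. A minimal bash sketch of that wrapper, reconstructed only from what the trace shows (the behaviour between failed attempts, and whether stderr is re-emitted on stderr or stdout, is not visible in this excerpt and is assumed):

    kubectl_bin() {
        # Run kubectl up to three times, capturing stdout/stderr in temp files,
        # matching the mktemp / 'seq 0 2' / exit_status pattern in the trace.
        local LAST_OUT LAST_ERR exit_status=0
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 ]; then
                continue    # assumption: the real helper likely pauses before retrying
            fi
            break
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2   # assumption: the real helper may print this to stdout instead
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }

In this log every call succeeds on the first attempt, which is why only the `exit_status=0` / `break` branch ever appears.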
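
The cleanup pass also strips finalizers from every PerconaXtraDBCluster before deleting them, so that `kubectl delete pxc --all --all-namespaces` cannot hang on finalizer handling. The pipeline is the same one traced above, only re-wrapped here for readability:

    # For each "<namespace> <name> ..." row (header skipped), clear
    # metadata.finalizers on the pxc custom resource with a merge patch.
    kubectl get pxc --all-namespaces -o wide \
        | grep -v NAMESPACE \
        | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'

With `xargs -L 1 sh -xc`, the first two columns of each row become `$0` (namespace) and `$1` (name), which is why the trace shows `kubectl patch pxc -n users-29404 some-name`.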
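
Most of the diff noise earlier in this excerpt is the operator comparing the StatefulSets it manages against the `percona.com/last-config-hash` annotation; despite the name, the value shown (truncated in this log) is base64 whose visible prefix decodes to the JSON-serialized spec last applied. One way to inspect it on a live cluster, using the object name and namespace from this log and assuming the full value decodes to valid JSON and that jq is available:

    # Decode the operator's last applied spec for the pxc StatefulSet.
    kubectl get sts some-name-pxc -n users-29404 -o json \
        | jq -r '.metadata.annotations["percona.com/last-config-hash"]' \
        | base64 -d | jq .

Comparing that output against the current `.spec` of the same StatefulSet shows the same kind of drift (secret hashes, probe timeouts, replica counts) that the diff above reports.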