Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/logs/users-8-0.log
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
+ create_infra users-30642
+ local ns=users-30642
+ '[' -n pxc-operator ']'
+ kubectl get pxc --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch pxc -n users-21319 some-name --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.Ezh2QXV9QZ
++ mktemp
+ local LAST_ERR=/tmp/tmp.13Baq2Gg8C
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.Ezh2QXV9QZ
perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-21319 namespace
+ cat /tmp/tmp.13Baq2Gg8C
+ rm /tmp/tmp.Ezh2QXV9QZ /tmp/tmp.13Baq2Gg8C
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.GdbbmFTZ5E
++ mktemp
+ local LAST_ERR=/tmp/tmp.syaEv54E00
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.GdbbmFTZ5E
No resources found
+ cat /tmp/tmp.syaEv54E00
+ rm /tmp/tmp.GdbbmFTZ5E /tmp/tmp.syaEv54E00
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.0XwlDH8bYO
++ mktemp
+ local LAST_ERR=/tmp/tmp.NtWd5yyDCu
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.0XwlDH8bYO
No resources found
+ cat /tmp/tmp.NtWd5yyDCu
+ rm /tmp/tmp.0XwlDH8bYO /tmp/tmp.NtWd5yyDCu
+ return 0
+ create_namespace pxc-operator
+ local namespace=pxc-operator
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ tail -n1
++ awk '-F ' '{print $2}'
++ sed s/NAMESPACE//
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ awk '{print $1}'
++ grep chaos-mesh
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep validate-auth
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl api-resources
++ kubectl get crd
++ grep chaos-mesh.org
++ awk '{print $1}'
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrole
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces pxc-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace pxc-operator
+ xargs kubectl delete ns
+ kubectl_bin get ns
++ mktemp
+ local LAST_OUT=/tmp/tmp.jWJjRF9k5x
++ mktemp
+ local LAST_ERR=/tmp/tmp.K5wVkrOwcE
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
+ grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
+ awk '{print$1}'
++ mktemp
+ local LAST_OUT=/tmp/tmp.y4xYgtSdBN
++ mktemp
+ local LAST_ERR=/tmp/tmp.MNjOfQTnmX
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.jWJjRF9k5x
+ cat /tmp/tmp.K5wVkrOwcE
+ rm /tmp/tmp.jWJjRF9k5x /tmp/tmp.K5wVkrOwcE
+ return 0
namespace "users-21319" deleted
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.y4xYgtSdBN
namespace "pxc-operator" deleted
+ cat /tmp/tmp.MNjOfQTnmX
+ rm /tmp/tmp.y4xYgtSdBN /tmp/tmp.MNjOfQTnmX
+ return 0
+ wait_for_delete namespace/pxc-operator
+ local res=namespace/pxc-operator
+ echo -n 'waiting for namespace/pxc-operator to be deleted'
waiting for namespace/pxc-operator to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "pxc-operator" not found
+ desc 'create namespace pxc-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.ThvPMY0Ltx
++ mktemp
+ local LAST_ERR=/tmp/tmp.2YLSVy2ycZ
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.ThvPMY0Ltx
namespace/pxc-operator created
+ cat /tmp/tmp.2YLSVy2ycZ
+ rm /tmp/tmp.ThvPMY0Ltx /tmp/tmp.2YLSVy2ycZ
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.oCd9AI9RkB
+++ mktemp
++ local LAST_ERR=/tmp/tmp.DgG5CiFKi6
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.oCd9AI9RkB
++ cat /tmp/tmp.DgG5CiFKi6
++ rm /tmp/tmp.oCd9AI9RkB /tmp/tmp.DgG5CiFKi6
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2193-1eb37b20-11-cluster5 --namespace=pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.p9PSCYL7IO
++ mktemp
+ local LAST_ERR=/tmp/tmp.cUlDdIl0pI
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2193-1eb37b20-11-cluster5 --namespace=pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.p9PSCYL7IO
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2193-1eb37b20-11-cluster5" modified.
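Note on the wrapper pattern visible throughout this trace: every kubectl_bin call allocates two mktemp files, retries the underlying kubectl command up to three times with stdout/stderr captured, then replays the captured output and removes the temp files. A minimal sketch reconstructed from the trace (kubectl_bin is a suite helper; the real definition may differ in detail, e.g. the extra test seen before the retry sleep):

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do                     # up to three attempts, as in the trace
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ $exit_status != 0 ]; then
            sleep 0                             # the trace shows 'sleep 0' between attempts
            continue
        fi
        break
    done
    cat "$LAST_OUT"                             # replay captured stdout
    cat "$LAST_ERR" >&2                         # replay captured stderr
    rm "$LAST_OUT" "$LAST_ERR"
    return $exit_status
}

The repeated "error: resource(s) were provided, but no name was specified" lines above are expected, not failures of the run: destroy_chaos_mesh feeds the output of "kubectl get <kind> | grep chaos-mesh | awk '{print $1}'" into "timeout 30 kubectl delete <kind>", so when grep matches nothing kubectl receives a resource kind with no names and errors out; the bare ":" after each attempt discards that error.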
+ cat /tmp/tmp.cUlDdIl0pI + rm /tmp/tmp.p9PSCYL7IO /tmp/tmp.cUlDdIl0pI + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.0JevzQ2ysE ++ mktemp + local LAST_ERR=/tmp/tmp.sqXWkEABH1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0JevzQ2ysE customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.sqXWkEABH1 + rm /tmp/tmp.0JevzQ2ysE /tmp/tmp.sqXWkEABH1 + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.ESTPwlEx1o ++ mktemp + local LAST_ERR=/tmp/tmp.Dve3XtVkzF + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ESTPwlEx1o clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.Dve3XtVkzF + rm /tmp/tmp.ESTPwlEx1o /tmp/tmp.Dve3XtVkzF + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/deploy/cw-operator.yaml + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + kubectl_bin apply -f - ++ mktemp + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2193-1eb37b20^' + local LAST_OUT=/tmp/tmp.cRINvO5xLy ++ mktemp + local LAST_ERR=/tmp/tmp.LVamT41aMU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.cRINvO5xLy deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.LVamT41aMU + rm /tmp/tmp.cRINvO5xLy /tmp/tmp.LVamT41aMU + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.M2VHrLS86f ++ mktemp + local LAST_ERR=/tmp/tmp.7fc0co2jlD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.M2VHrLS86f pod/percona-xtradb-cluster-operator-75d958d548-zndnv condition met + cat /tmp/tmp.7fc0co2jlD + rm /tmp/tmp.M2VHrLS86f /tmp/tmp.7fc0co2jlD + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.sk1wzsRhwT +++ mktemp ++ local LAST_ERR=/tmp/tmp.YlpfZ2eCVP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sk1wzsRhwT ++ cat /tmp/tmp.YlpfZ2eCVP ++ rm /tmp/tmp.sk1wzsRhwT /tmp/tmp.YlpfZ2eCVP ++ return 0 + wait_pod percona-xtradb-cluster-operator-75d958d548-zndnv 480 pxc-operator + local pod=percona-xtradb-cluster-operator-75d958d548-zndnv + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-75d958d548-zndnv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-75d958d548-zndnv condition met waiting for pod/percona-xtradb-cluster-operator-75d958d548-zndnv to become Ready.Ok + sleep 3 + create_namespace users-30642 + local namespace=users-30642 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + grep -E -v 
'^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces users-30642' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-30642 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-30642 + kubectl_bin get ns ++ mktemp + xargs kubectl delete ns ++ mktemp + local LAST_OUT=/tmp/tmp.fYdr3w76sc ++ mktemp + local LAST_OUT=/tmp/tmp.X1YubtS0V9 + local LAST_ERR=/tmp/tmp.s2QSjQY30i + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns ++ mktemp + local LAST_ERR=/tmp/tmp.tsLH4cBYW4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-30642 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-30642 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.fYdr3w76sc + cat /tmp/tmp.s2QSjQY30i + rm /tmp/tmp.fYdr3w76sc /tmp/tmp.s2QSjQY30i + return 0 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-30642 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.X1YubtS0V9 + cat /tmp/tmp.tsLH4cBYW4 Error from server (NotFound): namespaces "users-30642" not found + rm /tmp/tmp.X1YubtS0V9 /tmp/tmp.tsLH4cBYW4 + return 1 + : + wait_for_delete namespace/users-30642 + local res=namespace/users-30642 + echo -n 'waiting for namespace/users-30642 to be deleted' waiting for namespace/users-30642 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "users-30642" not found + desc 'create namespace users-30642' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-30642 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-30642 ++ mktemp + local LAST_OUT=/tmp/tmp.n8zk1bv0pb ++ mktemp + local LAST_ERR=/tmp/tmp.kzY0EquMR1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-30642 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.n8zk1bv0pb namespace/users-30642 created + cat /tmp/tmp.kzY0EquMR1 + rm /tmp/tmp.n8zk1bv0pb /tmp/tmp.kzY0EquMR1 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.OyHUfoIJfX +++ mktemp ++ local LAST_ERR=/tmp/tmp.Xz5f2D2T3Y ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OyHUfoIJfX ++ cat /tmp/tmp.Xz5f2D2T3Y ++ rm /tmp/tmp.OyHUfoIJfX /tmp/tmp.Xz5f2D2T3Y ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2193-1eb37b20-11-cluster5 --namespace=users-30642 ++ mktemp + local LAST_OUT=/tmp/tmp.7NXYdaXI7y ++ mktemp + local LAST_ERR=/tmp/tmp.RVDHqz5VMw + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2193-1eb37b20-11-cluster5 --namespace=users-30642 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7NXYdaXI7y Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2193-1eb37b20-11-cluster5" modified. 
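The namespace recreation that produced the interleaved output above condenses to the following commands (copied from the trace; kubectl_bin's retry and capture plumbing omitted). When the grep filter matches no leftover namespaces, xargs hands kubectl an empty name list, which yields the same harmless "no name was specified" error seen here between the two delete attempts:

kubectl get ns \
    | grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' \
    | awk '{print $1}' \
    | xargs kubectl delete ns

kubectl delete namespace users-30642 || :    # tolerated: the namespace may not exist yet
kubectl create namespace users-30642
kubectl config set-context "$(kubectl config current-context)" --namespace=users-30642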
+ cat /tmp/tmp.RVDHqz5VMw + rm /tmp/tmp.7NXYdaXI7y /tmp/tmp.RVDHqz5VMw + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.yYw1P9sqF5 ++ mktemp + local LAST_ERR=/tmp/tmp.vDOgLkP5VB + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.yYw1P9sqF5 secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.vDOgLkP5VB + rm /tmp/tmp.yYw1P9sqF5 /tmp/tmp.vDOgLkP5VB + return 0 + desc 'create PXC cluster with 1-password secret' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster with 1-password secret ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/some-name.yml '' '' /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/secrets_one_pass.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/secrets_one_pass.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/secrets_one_pass.yml ++ mktemp + local LAST_OUT=/tmp/tmp.FdGdhXF04m ++ mktemp + local LAST_ERR=/tmp/tmp.VkYEX5OeXA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/secrets_one_pass.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.FdGdhXF04m secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.VkYEX5OeXA + rm /tmp/tmp.FdGdhXF04m /tmp/tmp.VkYEX5OeXA + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/client.yml + kubectl_bin apply -f - ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + 
/usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + local LAST_OUT=/tmp/tmp.6WBTrEU6Uo + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-30642~ + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2193-1eb37b20#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_ERR=/tmp/tmp.feSpkTU1jz + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e 's#apply:.*#apply: Never#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/client.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6WBTrEU6Uo deployment.apps/pxc-client created + cat /tmp/tmp.feSpkTU1jz + rm /tmp/tmp.6WBTrEU6Uo /tmp/tmp.feSpkTU1jz + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2193-1eb37b20#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-30642~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_OUT=/tmp/tmp.0sCXXBKiOr ++ mktemp + local LAST_ERR=/tmp/tmp.fkq4xeKU41 + local exit_status=0 + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0sCXXBKiOr perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.fkq4xeKU41 + rm /tmp/tmp.0sCXXBKiOr /tmp/tmp.fkq4xeKU41 + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.WPzbl3mdGP ++++ mktemp +++ local LAST_ERR=/tmp/tmp.o7Ym6SfgbP +++ 
local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.WPzbl3mdGP +++ cat /tmp/tmp.o7Ym6SfgbP +++ rm /tmp/tmp.WPzbl3mdGP /tmp/tmp.o7Ym6SfgbP +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.PiqwRgVMCF ++++ mktemp +++ local LAST_ERR=/tmp/tmp.hItnEDUnk4 +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.PiqwRgVMCF +++ cat /tmp/tmp.hItnEDUnk4 +++ rm /tmp/tmp.PiqwRgVMCF /tmp/tmp.hItnEDUnk4 +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-30642 ++ mktemp + local LAST_OUT=/tmp/tmp.R4kr8XIbEA ++ mktemp + local LAST_ERR=/tmp/tmp.RtGYWiRMRx + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-30642 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-30642 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-30642 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.R4kr8XIbEA + cat /tmp/tmp.RtGYWiRMRx error: no matching resources found + rm /tmp/tmp.R4kr8XIbEA /tmp/tmp.RtGYWiRMRx + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + 
local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.eMXRZyU2l2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.YsQffY2Er3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eMXRZyU2l2 ++ cat /tmp/tmp.YsQffY2Er3 ++ rm /tmp/tmp.eMXRZyU2l2 /tmp/tmp.YsQffY2Er3 ++ return 0 + local 'root_pass=p1g*&-kWxT4i=q&vy>,' + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''p1g*&-kWxT4i=q&vy>,'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -p'\''p1g*&-kWxT4i=q&vy>,'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MUa6cKdZK9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Q3r4NqYGED ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MUa6cKdZK9 ++ cat /tmp/tmp.Q3r4NqYGED ++ rm /tmp/tmp.MUa6cKdZK9 /tmp/tmp.Q3r4NqYGED ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''p1g*&-kWxT4i=q&vy>,'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -p'\''p1g*&-kWxT4i=q&vy>,'\'' -P3306' ++ get_client_pod 
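run_mysql, used for the INSERT above and for every SELECT comparison below, routes SQL through the long-lived pxc-client deployment. The trace shows only the pod lookup and the wait; the mysql invocation itself runs under "set +o xtrace", so the exact exec command below is an assumption rather than something taken from the log:

run_mysql() {
    local command=$1 uri=$2
    local client_pod
    client_pod=$(kubectl_bin get pods --selector=name=pxc-client \
        -o 'jsonpath={.items[].metadata.name}')
    wait_pod "$client_pod"
    # hypothetical invocation; only the lookup and wait above are visible in the trace
    kubectl exec "$client_pod" -- bash -c "mysql -sN $uri -e \"$command\""
}

The recurring 'Defaulted container "pxc-client" out of: pxc-client, backup' message comes from kubectl exec picking the pod's first container because no -c flag is passed.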
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.80SS2ERHEy +++ mktemp ++ local LAST_ERR=/tmp/tmp.540QP4Z8j9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.80SS2ERHEy ++ cat /tmp/tmp.540QP4Z8j9 ++ rm /tmp/tmp.80SS2ERHEy /tmp/tmp.540QP4Z8j9 ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''p1g*&-kWxT4i=q&vy>,'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''p1g*&-kWxT4i=q&vy>,'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''p1g*&-kWxT4i=q&vy>,'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''p1g*&-kWxT4i=q&vy>,'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oqhiAszyH6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.viWKtnGOUg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oqhiAszyH6 ++ cat /tmp/tmp.viWKtnGOUg ++ rm /tmp/tmp.oqhiAszyH6 /tmp/tmp.viWKtnGOUg ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.f8TwUdsyjw/select-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-1.sql /tmp/tmp.f8TwUdsyjw/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''p1g*&-kWxT4i=q&vy>,'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''p1g*&-kWxT4i=q&vy>,'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''p1g*&-kWxT4i=q&vy>,'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''p1g*&-kWxT4i=q&vy>,'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.StnK3vkLbK +++ mktemp ++ local LAST_ERR=/tmp/tmp.t1oI5DitPt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.StnK3vkLbK ++ cat /tmp/tmp.t1oI5DitPt ++ rm /tmp/tmp.StnK3vkLbK /tmp/tmp.t1oI5DitPt ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.f8TwUdsyjw/select-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-1.sql /tmp/tmp.f8TwUdsyjw/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''p1g*&-kWxT4i=q&vy>,'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''p1g*&-kWxT4i=q&vy>,'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''p1g*&-kWxT4i=q&vy>,'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''p1g*&-kWxT4i=q&vy>,'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SSH9BDyI9C +++ mktemp ++ local LAST_ERR=/tmp/tmp.ttibWGhuxH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SSH9BDyI9C ++ cat /tmp/tmp.ttibWGhuxH ++ rm /tmp/tmp.SSH9BDyI9C /tmp/tmp.ttibWGhuxH ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.f8TwUdsyjw/select-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-1.sql /tmp/tmp.f8TwUdsyjw/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ grep -E -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Fvk9ccrqhO +++ mktemp ++ local LAST_ERR=/tmp/tmp.nDU0oIQPhk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Fvk9ccrqhO ++ cat /tmp/tmp.nDU0oIQPhk Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.Fvk9ccrqhO /tmp/tmp.nDU0oIQPhk ++ return 0 + '[' '' ']' + desc 'test missing passwords were created and present in internal secrets' + set +o xtrace ----------------------------------------------------------------------------------- test missing passwords were created and present in internal secrets ----------------------------------------------------------------------------------- + empty_pwds=() + wrong_pwds=() + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking root' Checking root ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.PL8hYxVAWX +++ mktemp ++ local LAST_ERR=/tmp/tmp.48wfapmjE5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PL8hYxVAWX ++ cat /tmp/tmp.48wfapmjE5 ++ rm /tmp/tmp.PL8hYxVAWX /tmp/tmp.48wfapmjE5 ++ return 0 + secret_pass='p1g*&-kWxT4i=q&vy>,' ++ getSecretData internal-some-name root ++ local secretName=internal-some-name ++ local dataKey=root ++ kubectl_bin get secrets/internal-some-name '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.p8p6PiVS8T +++ mktemp ++ local LAST_ERR=/tmp/tmp.loEcJfJgEX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.p8p6PiVS8T ++ cat /tmp/tmp.loEcJfJgEX ++ rm /tmp/tmp.p8p6PiVS8T /tmp/tmp.loEcJfJgEX ++ return 0 + int_secret_pass='p1g*&-kWxT4i=q&vy>,' + [[ -z p1g*&-kWxT4i=q&vy>, ]] + [[ p1g*&-kWxT4i=q&vy>, != \p\1\g\*\&\-\k\W\x\T\4\i\=\q\&\v\y\>\, ]] + [[ root != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ root ]] + [[ '' =~ root ]] + echo 'Running compare for root' Running compare for root + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''p1g*&-kWxT4i=q&vy>,'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''p1g*&-kWxT4i=q&vy>,'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql ]] + 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''p1g*&-kWxT4i=q&vy>,'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''p1g*&-kWxT4i=q&vy>,'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3e4U00tg46 +++ mktemp ++ local LAST_ERR=/tmp/tmp.z1iuB1tjDW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3e4U00tg46 ++ cat /tmp/tmp.z1iuB1tjDW ++ rm /tmp/tmp.3e4U00tg46 /tmp/tmp.z1iuB1tjDW ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.f8TwUdsyjw/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.f8TwUdsyjw/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking xtrabackup' Checking xtrabackup ++ getSecretData my-cluster-secrets xtrabackup ++ local secretName=my-cluster-secrets ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.j23Z0gcdFL +++ mktemp ++ local LAST_ERR=/tmp/tmp.Hcyw9gPIwC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.j23Z0gcdFL ++ cat /tmp/tmp.Hcyw9gPIwC ++ rm /tmp/tmp.j23Z0gcdFL /tmp/tmp.Hcyw9gPIwC ++ return 0 + secret_pass=y3l1oRbo1X6l29zHMl~ ++ getSecretData internal-some-name xtrabackup ++ local secretName=internal-some-name ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.E4Ow3SGshe +++ mktemp ++ local LAST_ERR=/tmp/tmp.0YvtOvlJgu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.E4Ow3SGshe ++ cat /tmp/tmp.0YvtOvlJgu ++ rm /tmp/tmp.E4Ow3SGshe /tmp/tmp.0YvtOvlJgu ++ return 0 + int_secret_pass=y3l1oRbo1X6l29zHMl~ + [[ -z y3l1oRbo1X6l29zHMl~ ]] + [[ y3l1oRbo1X6l29zHMl~ != \y\3\l\1\o\R\b\o\1\X\6\l\2\9\z\H\M\l\~ ]] + [[ xtrabackup != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ xtrabackup ]] + [[ '' =~ xtrabackup ]] + echo 'Running compare for xtrabackup' Running compare for xtrabackup + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''y3l1oRbo1X6l29zHMl~'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''y3l1oRbo1X6l29zHMl~'\''' + local postfix= + local 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''y3l1oRbo1X6l29zHMl~'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''y3l1oRbo1X6l29zHMl~'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BSTQOY9TAC +++ mktemp ++ local LAST_ERR=/tmp/tmp.lU8KRAdSgt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BSTQOY9TAC ++ cat /tmp/tmp.lU8KRAdSgt ++ rm /tmp/tmp.BSTQOY9TAC /tmp/tmp.lU8KRAdSgt ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.f8TwUdsyjw/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.f8TwUdsyjw/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking monitor' Checking monitor ++ getSecretData my-cluster-secrets monitor ++ local secretName=my-cluster-secrets ++ local dataKey=monitor ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.4s27T3Hg8w +++ mktemp ++ local LAST_ERR=/tmp/tmp.PhYyL924x8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4s27T3Hg8w ++ cat /tmp/tmp.PhYyL924x8 ++ rm /tmp/tmp.4s27T3Hg8w /tmp/tmp.PhYyL924x8 ++ return 0 + secret_pass=monitor_password ++ getSecretData internal-some-name monitor ++ local secretName=internal-some-name ++ local dataKey=monitor ++ kubectl_bin get secrets/internal-some-name '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.cVWHKWNjuS +++ mktemp ++ local LAST_ERR=/tmp/tmp.bdjnXtcQjg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cVWHKWNjuS ++ cat /tmp/tmp.bdjnXtcQjg ++ rm /tmp/tmp.cVWHKWNjuS /tmp/tmp.bdjnXtcQjg ++ return 0 + int_secret_pass=monitor_password + [[ -z monitor_password ]] + [[ monitor_password != \m\o\n\i\t\o\r\_\p\a\s\s\w\o\r\d ]] + [[ monitor != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ monitor ]] + [[ '' =~ monitor ]] + echo 'Running compare for monitor' Running compare for monitor + compare_mysql_cmd select-4 
'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Fzb21gQYvh +++ mktemp ++ local LAST_ERR=/tmp/tmp.R5wjcVJnIt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Fzb21gQYvh ++ cat /tmp/tmp.R5wjcVJnIt ++ rm /tmp/tmp.Fzb21gQYvh /tmp/tmp.R5wjcVJnIt ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.f8TwUdsyjw/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.f8TwUdsyjw/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking proxyadmin' Checking proxyadmin ++ getSecretData my-cluster-secrets proxyadmin ++ local secretName=my-cluster-secrets ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.2Sd2mkry3U +++ mktemp ++ local LAST_ERR=/tmp/tmp.Vak80eQNmy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2Sd2mkry3U ++ cat /tmp/tmp.Vak80eQNmy ++ rm /tmp/tmp.2Sd2mkry3U /tmp/tmp.Vak80eQNmy ++ return 0 + secret_pass='(s(TGzF2Yn0rxhi=#' ++ getSecretData internal-some-name proxyadmin ++ local secretName=internal-some-name ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.phMNLSE3du +++ mktemp ++ local LAST_ERR=/tmp/tmp.IKSEbuBHwf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.phMNLSE3du ++ cat /tmp/tmp.IKSEbuBHwf ++ rm /tmp/tmp.phMNLSE3du /tmp/tmp.IKSEbuBHwf ++ return 0 + int_secret_pass='(s(TGzF2Yn0rxhi=#' + [[ -z (s(TGzF2Yn0rxhi=# ]] + [[ (s(TGzF2Yn0rxhi=# != \(\s\(\T\G\z\F\2\Y\n\0\r\x\h\i\=\# ]] + [[ proxyadmin != \p\r\o\x\y\a\d\m\i\n ]] + [[ proxyadmin == \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ proxyadmin ]] + [[ '' =~ proxyadmin ]] + echo 'Running compare for proxyadmin' Running compare for proxyadmin + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''(s(TGzF2Yn0rxhi=#'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''(s(TGzF2Yn0rxhi=#'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''(s(TGzF2Yn0rxhi=#'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''(s(TGzF2Yn0rxhi=#'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.f8TwUdsyjw/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-2.sql /tmp/tmp.f8TwUdsyjw/select-2.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking operator' Checking operator ++ getSecretData my-cluster-secrets operator ++ local secretName=my-cluster-secrets ++ local dataKey=operator ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.teCmIOBnfT +++ mktemp ++ local LAST_ERR=/tmp/tmp.wvGNR60m2v ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.teCmIOBnfT ++ cat /tmp/tmp.wvGNR60m2v ++ rm /tmp/tmp.teCmIOBnfT /tmp/tmp.wvGNR60m2v ++ return 0 + secret_pass='bwU9<=S+vu2(E*hfb' ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.3ii7ZGieFu +++ mktemp ++ local LAST_ERR=/tmp/tmp.MZqXJpUHm8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3ii7ZGieFu ++ cat /tmp/tmp.MZqXJpUHm8 ++ rm /tmp/tmp.3ii7ZGieFu /tmp/tmp.MZqXJpUHm8 ++ return 0 + int_secret_pass='bwU9<=S+vu2(E*hfb' + [[ -z bwU9<=S+vu2(E*hfb ]] + [[ bwU9<=S+vu2(E*hfb != \b\w\U\9\<\=\S\+\v\u\2\(\E\*\h\f\b ]] + [[ operator != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ operator ]] + [[ '' =~ operator ]] + echo 'Running compare for operator' Running compare for operator + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''bwU9<=S+vu2(E*hfb'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''bwU9<=S+vu2(E*hfb'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''bwU9<=S+vu2(E*hfb'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''bwU9<=S+vu2(E*hfb'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.f85Y7uAYvs +++ mktemp ++ local LAST_ERR=/tmp/tmp.4tFmR190GL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.f85Y7uAYvs ++ cat /tmp/tmp.4tFmR190GL ++ rm /tmp/tmp.f85Y7uAYvs /tmp/tmp.4tFmR190GL ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E 
'^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.f8TwUdsyjw/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.f8TwUdsyjw/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking replication' Checking replication ++ getSecretData my-cluster-secrets replication ++ local secretName=my-cluster-secrets ++ local dataKey=replication ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.DEyva9aBVG +++ mktemp ++ local LAST_ERR=/tmp/tmp.0zsaf3L33c ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DEyva9aBVG ++ cat /tmp/tmp.0zsaf3L33c ++ rm /tmp/tmp.DEyva9aBVG /tmp/tmp.0zsaf3L33c ++ return 0 + secret_pass='cf1<=T#8VhIE}e!GQ}I' ++ getSecretData internal-some-name replication ++ local secretName=internal-some-name ++ local dataKey=replication ++ kubectl_bin get secrets/internal-some-name '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.fU40OG86FJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.31GIwiIFi3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fU40OG86FJ ++ cat /tmp/tmp.31GIwiIFi3 ++ rm /tmp/tmp.fU40OG86FJ /tmp/tmp.31GIwiIFi3 ++ return 0 + int_secret_pass='cf1<=T#8VhIE}e!GQ}I' + [[ -z cf1<=T#8VhIE}e!GQ}I ]] + [[ cf1<=T#8VhIE}e!GQ}I != \c\f\1\<\=\T\#\8\V\h\I\E\}\e\!\G\Q\}\I ]] + [[ replication != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ replication ]] + [[ '' =~ replication ]] + echo 'Running compare for replication' Running compare for replication + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''cf1<=T#8VhIE}e!GQ}I'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''cf1<=T#8VhIE}e!GQ}I'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''cf1<=T#8VhIE}e!GQ}I'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''cf1<=T#8VhIE}e!GQ}I'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4CEWs2WcO5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.eRgfkOWupg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4CEWs2WcO5 ++ cat 
/tmp/tmp.eRgfkOWupg ++ rm /tmp/tmp.4CEWs2WcO5 /tmp/tmp.eRgfkOWupg ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.f8TwUdsyjw/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.f8TwUdsyjw/select-4.sql + [[ -n '' ]] + [[ -n '' ]] + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.FSgUNPJiUD ++ mktemp + local LAST_ERR=/tmp/tmp.djbsQ1FPv1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.FSgUNPJiUD secret/my-cluster-secrets patched + cat /tmp/tmp.djbsQ1FPv1 + rm /tmp/tmp.FSgUNPJiUD /tmp/tmp.djbsQ1FPv1 + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OZWhsucDZH +++ mktemp ++ local LAST_ERR=/tmp/tmp.3rjsxEkiGS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OZWhsucDZH ++ cat /tmp/tmp.3rjsxEkiGS ++ rm /tmp/tmp.OZWhsucDZH /tmp/tmp.3rjsxEkiGS ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container 
"pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.f8TwUdsyjw/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.f8TwUdsyjw/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ISsjnZVsJl ++ mktemp + local LAST_ERR=/tmp/tmp.YYFIQ4uKHD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ISsjnZVsJl perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.YYFIQ4uKHD + rm /tmp/tmp.ISsjnZVsJl /tmp/tmp.YYFIQ4uKHD + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aTiNloUr07 +++ mktemp ++ local LAST_ERR=/tmp/tmp.YxtXrPA9sH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aTiNloUr07 ++ cat /tmp/tmp.YxtXrPA9sH ++ rm /tmp/tmp.aTiNloUr07 /tmp/tmp.YxtXrPA9sH ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.leAJLpTlfw +++ mktemp ++ local LAST_ERR=/tmp/tmp.EKvbdcLJDX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.leAJLpTlfw ++ cat /tmp/tmp.EKvbdcLJDX ++ rm /tmp/tmp.leAJLpTlfw /tmp/tmp.EKvbdcLJDX ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.yzUhNqcuYA ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ZTNpNzVWKh +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.yzUhNqcuYA +++++ cat /tmp/tmp.ZTNpNzVWKh +++++ rm /tmp/tmp.yzUhNqcuYA /tmp/tmp.ZTNpNzVWKh +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.U4N5bZMDA8 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.lnXxdvD7Cz +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e 
+++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.U4N5bZMDA8 +++++ cat /tmp/tmp.lnXxdvD7Cz +++++ rm /tmp/tmp.U4N5bZMDA8 /tmp/tmp.lnXxdvD7Cz +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J0a9oUkuce +++ mktemp ++ local LAST_ERR=/tmp/tmp.fV9xJAeacF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.J0a9oUkuce ++ cat /tmp/tmp.fV9xJAeacF ++ rm /tmp/tmp.J0a9oUkuce /tmp/tmp.fV9xJAeacF ++ return 0 + [[ 3 == \3 ]] + echo + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.JWpr9Cyb1o ++ mktemp + local LAST_ERR=/tmp/tmp.f3x9ey35wC + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.JWpr9Cyb1o secret/my-cluster-secrets patched + cat /tmp/tmp.f3x9ey35wC + rm /tmp/tmp.JWpr9Cyb1o /tmp/tmp.f3x9ey35wC + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4bQcERZWF6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.O6A02JuDr4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4bQcERZWF6 ++ cat /tmp/tmp.O6A02JuDr4 ++ rm /tmp/tmp.4bQcERZWF6 /tmp/tmp.O6A02JuDr4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uLkY1Iydi3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.HNczSBiqHv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uLkY1Iydi3 ++ cat /tmp/tmp.HNczSBiqHv ++ rm /tmp/tmp.uLkY1Iydi3 /tmp/tmp.HNczSBiqHv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
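The wait in progress here is wait_cluster_consistency some-name 3 3: after the proxysql resize the suite polls .status.state every five seconds, printing one dot per attempt, and only once the resource reports ready does it cross-check the replica counts. A condensed sketch of the loop as expanded in the trace:

    # Poll the pxc resource until ready, then verify the replica counts.
    wait_cluster_consistency() {
        local cluster_name=$1 cluster_size=$2 proxy_size=$3
        local i=0 max=300
        sleep 7
        until [[ $(kubectl get pxc "${cluster_name}" \
                -o 'jsonpath={.status.state}') == "ready" ]]; do
            echo -n .
            sleep 5
            if [[ ${i} -ge ${max} ]]; then
                echo "pxc/${cluster_name} never became ready" >&2
                return 1
            fi
            let i+=1
        done
        # "ready" alone is not enough; the counts must match the spec sizes.
        [[ $(kubectl get pxc "${cluster_name}" \
                -o 'jsonpath={.status.pxc.ready}') == "${cluster_size}" ]]
        [[ $(kubectl get pxc "${cluster_name}" \
                -o 'jsonpath={.status.proxysql.ready}') == "${proxy_size}" ]]
    }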
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Fz3b5ReR4A +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ykddf31eUW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Fz3b5ReR4A ++ cat /tmp/tmp.Ykddf31eUW ++ rm /tmp/tmp.Fz3b5ReR4A /tmp/tmp.Ykddf31eUW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7hRwUBxopb +++ mktemp ++ local LAST_ERR=/tmp/tmp.OIBypo8KV9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7hRwUBxopb ++ cat /tmp/tmp.OIBypo8KV9 ++ rm /tmp/tmp.7hRwUBxopb /tmp/tmp.OIBypo8KV9 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mPj7rpSBtj +++ mktemp ++ local LAST_ERR=/tmp/tmp.aaD5OywbE4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mPj7rpSBtj ++ cat /tmp/tmp.aaD5OywbE4 ++ rm /tmp/tmp.mPj7rpSBtj /tmp/tmp.aaD5OywbE4 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.rg2cQCakb7 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.naiVABf3tr +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.rg2cQCakb7 +++++ cat /tmp/tmp.naiVABf3tr +++++ rm /tmp/tmp.rg2cQCakb7 /tmp/tmp.naiVABf3tr +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.5JGVrTEW5X ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.1b0rbAdf89 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.5JGVrTEW5X +++++ cat /tmp/tmp.1b0rbAdf89 +++++ rm /tmp/tmp.5JGVrTEW5X /tmp/tmp.1b0rbAdf89 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TiPtRtbuN3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.VsdPNQC49B ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TiPtRtbuN3 ++ cat /tmp/tmp.VsdPNQC49B ++ rm /tmp/tmp.TiPtRtbuN3 /tmp/tmp.VsdPNQC49B ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local 
command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.f8TwUdsyjw/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-2.sql /tmp/tmp.f8TwUdsyjw/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.f8TwUdsyjw/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-2.sql /tmp/tmp.f8TwUdsyjw/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.f8TwUdsyjw/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-2.sql /tmp/tmp.f8TwUdsyjw/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.6xTlsoMcZS ++ mktemp + local LAST_ERR=/tmp/tmp.wtwrtGnnhk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6xTlsoMcZS perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.wtwrtGnnhk + rm /tmp/tmp.6xTlsoMcZS /tmp/tmp.wtwrtGnnhk + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.D7CWJuEU6f ++ mktemp + local LAST_ERR=/tmp/tmp.3Ws3w062g3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.D7CWJuEU6f secret/my-cluster-secrets patched + cat /tmp/tmp.3Ws3w062g3 + rm /tmp/tmp.D7CWJuEU6f /tmp/tmp.3Ws3w062g3 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Dk2k0f2aB4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.bIlnZgMA5W ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Dk2k0f2aB4 ++ cat /tmp/tmp.bIlnZgMA5W ++ rm /tmp/tmp.Dk2k0f2aB4 /tmp/tmp.bIlnZgMA5W ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Q6yfsDSVEm +++ mktemp ++ local LAST_ERR=/tmp/tmp.cRaSz846aM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Q6yfsDSVEm ++ cat /tmp/tmp.cRaSz846aM ++ rm /tmp/tmp.Q6yfsDSVEm /tmp/tmp.cRaSz846aM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
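Each test phase rotates a password by patching a single key of my-cluster-secrets; the recurring value dGVzdC1wYXNzd29yZA== is simply base64 for test-password. patch_secret, as expanded above for root, proxyadmin, and now xtrabackup, is a thin wrapper over a patch of the Secret's data map:

    # Overwrite one key of a Secret with a base64-encoded value.
    patch_secret() {
        local secret=$1 key=$2 value=$3
        kubectl patch secret "${secret}" \
            -p="{\"data\":{\"${key}\": \"${value}\"}}"
    }

    # The xtrabackup rotation performed above:
    patch_secret my-cluster-secrets xtrabackup "$(echo -n test-password | base64)"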
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ObctGIEL4D +++ mktemp ++ local LAST_ERR=/tmp/tmp.tzKG6mpQOp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ObctGIEL4D ++ cat /tmp/tmp.tzKG6mpQOp ++ rm /tmp/tmp.ObctGIEL4D /tmp/tmp.tzKG6mpQOp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZrE40z8V5s +++ mktemp ++ local LAST_ERR=/tmp/tmp.8vMdXxxfQz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZrE40z8V5s ++ cat /tmp/tmp.8vMdXxxfQz ++ rm /tmp/tmp.ZrE40z8V5s /tmp/tmp.8vMdXxxfQz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BZTLAuQKwq +++ mktemp ++ local LAST_ERR=/tmp/tmp.q6gZlKa0Hy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BZTLAuQKwq ++ cat /tmp/tmp.q6gZlKa0Hy ++ rm /tmp/tmp.BZTLAuQKwq /tmp/tmp.q6gZlKa0Hy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RiSqZrxLNB +++ mktemp ++ local LAST_ERR=/tmp/tmp.qwXucG9rnK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RiSqZrxLNB ++ cat /tmp/tmp.qwXucG9rnK ++ rm /tmp/tmp.RiSqZrxLNB /tmp/tmp.qwXucG9rnK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WCVIU0wsty +++ mktemp ++ local LAST_ERR=/tmp/tmp.VJcTBtTMol ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WCVIU0wsty ++ cat /tmp/tmp.VJcTBtTMol ++ rm /tmp/tmp.WCVIU0wsty /tmp/tmp.VJcTBtTMol ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fuBYjJowWG +++ mktemp ++ local LAST_ERR=/tmp/tmp.SYEaxnaAw1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fuBYjJowWG ++ cat /tmp/tmp.SYEaxnaAw1 ++ rm /tmp/tmp.fuBYjJowWG /tmp/tmp.SYEaxnaAw1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.l4eTIOWX89 +++ mktemp ++ local LAST_ERR=/tmp/tmp.d5KGxc2ZRb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.l4eTIOWX89 ++ cat /tmp/tmp.d5KGxc2ZRb ++ rm /tmp/tmp.l4eTIOWX89 /tmp/tmp.d5KGxc2ZRb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aNztldHW55 +++ mktemp ++ local LAST_ERR=/tmp/tmp.RSxJm3G50G ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aNztldHW55 ++ cat /tmp/tmp.RSxJm3G50G ++ rm /tmp/tmp.aNztldHW55 /tmp/tmp.RSxJm3G50G ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pHV27PSQqZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.9JVai7fCtB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pHV27PSQqZ ++ cat /tmp/tmp.9JVai7fCtB ++ rm /tmp/tmp.pHV27PSQqZ /tmp/tmp.9JVai7fCtB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TXIf7mLlIE +++ mktemp ++ local LAST_ERR=/tmp/tmp.T9rT3swjsP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TXIf7mLlIE ++ cat /tmp/tmp.T9rT3swjsP ++ rm /tmp/tmp.TXIf7mLlIE /tmp/tmp.T9rT3swjsP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qP2QhSIqFD +++ mktemp ++ local LAST_ERR=/tmp/tmp.YPEKnEC8s6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qP2QhSIqFD ++ cat /tmp/tmp.YPEKnEC8s6 ++ rm /tmp/tmp.qP2QhSIqFD /tmp/tmp.YPEKnEC8s6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0YiGeJ5Hs2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.fP8x8z9ejp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0YiGeJ5Hs2 ++ cat /tmp/tmp.fP8x8z9ejp ++ rm /tmp/tmp.0YiGeJ5Hs2 /tmp/tmp.fP8x8z9ejp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JqZ6zAmS5J +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZMSyBnrwwK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JqZ6zAmS5J ++ cat /tmp/tmp.ZMSyBnrwwK ++ rm /tmp/tmp.JqZ6zAmS5J /tmp/tmp.ZMSyBnrwwK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mFTn8gBDbj +++ mktemp ++ local LAST_ERR=/tmp/tmp.E5TZM2f1IK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mFTn8gBDbj ++ cat /tmp/tmp.E5TZM2f1IK ++ rm /tmp/tmp.mFTn8gBDbj /tmp/tmp.E5TZM2f1IK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jAEzhB2yh5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.FZK6ihztyi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jAEzhB2yh5 ++ cat /tmp/tmp.FZK6ihztyi ++ rm /tmp/tmp.jAEzhB2yh5 /tmp/tmp.FZK6ihztyi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aNRldbpb6M +++ mktemp ++ local LAST_ERR=/tmp/tmp.g6LtpPq2Jf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aNRldbpb6M ++ cat /tmp/tmp.g6LtpPq2Jf ++ rm /tmp/tmp.aNRldbpb6M /tmp/tmp.g6LtpPq2Jf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KS4I6LOiLw +++ mktemp ++ local LAST_ERR=/tmp/tmp.jWd1tIiEq9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KS4I6LOiLw ++ cat /tmp/tmp.jWd1tIiEq9 ++ rm /tmp/tmp.KS4I6LOiLw /tmp/tmp.jWd1tIiEq9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MvytTWMmLF +++ mktemp ++ local LAST_ERR=/tmp/tmp.MDTttWm0EM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MvytTWMmLF ++ cat /tmp/tmp.MDTttWm0EM ++ rm /tmp/tmp.MvytTWMmLF /tmp/tmp.MDTttWm0EM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
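For scale: this loop is bounded by max=300 attempts at sleep 5 each (after the initial sleep 7), so a cluster that never returns to ready fails the wait after about 300 × 5 s = 1500 s, roughly 25 minutes; the xtrabackup rotation in this run needs only 23 attempts, about two minutes of polling.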
.+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8BYr2H434f +++ mktemp ++ local LAST_ERR=/tmp/tmp.BSuxqSsjAF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8BYr2H434f ++ cat /tmp/tmp.BSuxqSsjAF ++ rm /tmp/tmp.8BYr2H434f /tmp/tmp.BSuxqSsjAF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YS57C236vo +++ mktemp ++ local LAST_ERR=/tmp/tmp.xr0OWoyaf6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YS57C236vo ++ cat /tmp/tmp.xr0OWoyaf6 ++ rm /tmp/tmp.YS57C236vo /tmp/tmp.xr0OWoyaf6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KCNQVjr8SX +++ mktemp ++ local LAST_ERR=/tmp/tmp.tBBapp7QId ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KCNQVjr8SX ++ cat /tmp/tmp.tBBapp7QId ++ rm /tmp/tmp.KCNQVjr8SX /tmp/tmp.tBBapp7QId ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 22 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nnd9IuHI4J +++ mktemp ++ local LAST_ERR=/tmp/tmp.eoXPWHZfCK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nnd9IuHI4J ++ cat /tmp/tmp.eoXPWHZfCK ++ rm /tmp/tmp.nnd9IuHI4J /tmp/tmp.eoXPWHZfCK ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qTVMIsnjFo +++ mktemp ++ local LAST_ERR=/tmp/tmp.y6iMvUcmL9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qTVMIsnjFo ++ cat /tmp/tmp.y6iMvUcmL9 ++ rm /tmp/tmp.qTVMIsnjFo /tmp/tmp.y6iMvUcmL9 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.hxoR5CkvyQ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.xuw40jXgPs +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.hxoR5CkvyQ +++++ cat /tmp/tmp.xuw40jXgPs +++++ rm /tmp/tmp.hxoR5CkvyQ /tmp/tmp.xuw40jXgPs +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.vXqrzA1PCy ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.lIfolLjEBb +++++ local exit_status=0 ++++++ seq 0 2 
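The deeply nested ++++ trace around this point is get_proxy_engine resolving which proxy fronts the cluster before the ready count is read: it checks spec.haproxy.enabled first, falls through to spec.proxysql.enabled, and here settles on proxysql. A simplified sketch (the real helper first composes the proxy service name, some-name-proxysql, via get_proxy and derives the engine from that):

    # Determine which proxy the cluster runs, per the nested trace.
    get_proxy_engine() {
        local cluster_name=$1
        if [[ $(kubectl get pxc "${cluster_name}" \
                -o 'jsonpath={.spec.haproxy.enabled}') == "true" ]]; then
            echo haproxy
        elif [[ $(kubectl get pxc "${cluster_name}" \
                -o 'jsonpath={.spec.proxysql.enabled}') == "true" ]]; then
            echo proxysql
        fi
    }

    # The suite then reads .status.proxysql.ready and compares it to the
    # expected proxy size (2 after the resize in this phase).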
+++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.vXqrzA1PCy +++++ cat /tmp/tmp.lIfolLjEBb +++++ rm /tmp/tmp.vXqrzA1PCy /tmp/tmp.lIfolLjEBb +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7k8BqbXWHv +++ mktemp ++ local LAST_ERR=/tmp/tmp.xKEVzMn9SP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7k8BqbXWHv ++ cat /tmp/tmp.xKEVzMn9SP ++ rm /tmp/tmp.7k8BqbXWHv /tmp/tmp.xKEVzMn9SP ++ return 0 + [[ 2 == \2 ]] + echo + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-3-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' 
-s /tmp/tmp.f8TwUdsyjw/select-3.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-3.sql /tmp/tmp.f8TwUdsyjw/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Sv7zqNPL14 ++ mktemp + local LAST_ERR=/tmp/tmp.1pE89YSbDu + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Sv7zqNPL14 secret/my-cluster-secrets patched + cat /tmp/tmp.1pE89YSbDu + rm /tmp/tmp.Sv7zqNPL14 /tmp/tmp.1pE89YSbDu + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.65RZBBotI0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.IM6C8n6EBu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.65RZBBotI0 ++ cat /tmp/tmp.IM6C8n6EBu ++ rm /tmp/tmp.65RZBBotI0 /tmp/tmp.IM6C8n6EBu ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + retry=0 + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep additional_password + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BKot9pvmCt +++ mktemp ++ local LAST_ERR=/tmp/tmp.beR25WeOaO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BKot9pvmCt ++ cat /tmp/tmp.beR25WeOaO ++ rm /tmp/tmp.BKot9pvmCt /tmp/tmp.beR25WeOaO ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace {"additional_password": 
"$A$005$pV\\u0016\\u001d|X\\u0018Si0]\\u0011\\u0006c%\\u00013*C\\u000fbHzPdJbqdMf5BVxyjfpryLYiUX2hrIdZL798Byn1Hy2"} + retry=0 + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X4zB5IMd2O +++ mktemp ++ local LAST_ERR=/tmp/tmp.O5w1Aq82be ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.X4zB5IMd2O ++ cat /tmp/tmp.O5w1Aq82be ++ rm /tmp/tmp.X4zB5IMd2O /tmp/tmp.O5w1Aq82be ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 1 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3rOd89fLML +++ mktemp ++ local LAST_ERR=/tmp/tmp.Chqkfn6mcy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3rOd89fLML ++ cat /tmp/tmp.Chqkfn6mcy ++ rm /tmp/tmp.3rOd89fLML /tmp/tmp.Chqkfn6mcy ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 2 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user 
WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e9sjsZEmIJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.QKW1ZTw8xE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.e9sjsZEmIJ ++ cat /tmp/tmp.QKW1ZTw8xE ++ rm /tmp/tmp.e9sjsZEmIJ /tmp/tmp.QKW1ZTw8xE ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 3 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aLF20TZadR +++ mktemp ++ local LAST_ERR=/tmp/tmp.g11OsTbHkR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aLF20TZadR ++ cat /tmp/tmp.g11OsTbHkR ++ rm /tmp/tmp.aLF20TZadR /tmp/tmp.g11OsTbHkR ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 4 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp 
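The monitor phase exercises MySQL 8.0 dual passwords: after the operator applies the new password with RETAIN CURRENT PASSWORD, mysql.user.User_attributes briefly carries the additional_password JSON printed earlier, and this loop polls until the operator discards the old credential and the column reads NULL again. The two predicates, as expanded in the trace (run_mysql is the suite's query runner; grep's exit status drives the retry):

    # True while the user still has a retained secondary password.
    is_password_updated() {
        local username=$1 uri=$2
        run_mysql "SELECT User_attributes FROM mysql.user WHERE user='${username}'" \
            "${uri}" | grep additional_password
    }

    # True once the retained password is discarded (attributes read NULL).
    is_old_password_discarded() {
        local username=$1 uri=$2
        run_mysql "SELECT User_attributes FROM mysql.user WHERE user='${username}'" \
            "${uri}" | grep NULL
    }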
++ local LAST_OUT=/tmp/tmp.uAAJDoKK29 +++ mktemp ++ local LAST_ERR=/tmp/tmp.dQtLon6IeA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uAAJDoKK29 ++ cat /tmp/tmp.dQtLon6IeA ++ rm /tmp/tmp.uAAJDoKK29 /tmp/tmp.dQtLon6IeA ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 5 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mTiBvlKW7V +++ mktemp ++ local LAST_ERR=/tmp/tmp.j75JhxSHFe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mTiBvlKW7V ++ cat /tmp/tmp.j75JhxSHFe ++ rm /tmp/tmp.mTiBvlKW7V /tmp/tmp.j75JhxSHFe ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 6 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SoUGTbUrgi +++ mktemp ++ local LAST_ERR=/tmp/tmp.YozDuIPqyX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SoUGTbUrgi ++ cat /tmp/tmp.YozDuIPqyX 
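Every query in this log executes from a long-lived client deployment: get_client_pod looks the pod up by its name=pxc-client label, and wait_pod blocks until it is Ready. The sed/grep pair derives a container name only for pods named *-pxc-N or *-proxysql-N; for the client pod it yields nothing, which is why kubectl prints the "Defaulted container" notice seen throughout. A sketch (kubectl wait produces the "condition met" lines above; how the suite applies max_retry=480 is not visible in the trace):

    # Look up the client pod by label.
    get_client_pod() {
        kubectl get pods --selector=name=pxc-client \
            -o 'jsonpath={.items[].metadata.name}'
    }

    # Block until the pod is Ready; the container stays empty for the client pod.
    wait_pod() {
        local pod=$1
        local container
        container=$(echo "${pod}" \
            | /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' \
            | grep -E '^(pxc|proxysql)$' || true)
        kubectl wait --for=condition=Ready "pod/${pod}"
    }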
++ rm /tmp/tmp.SoUGTbUrgi /tmp/tmp.YozDuIPqyX ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 7 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ze8cn8PBzy +++ mktemp ++ local LAST_ERR=/tmp/tmp.0sS8vO3G2E ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ze8cn8PBzy ++ cat /tmp/tmp.0sS8vO3G2E ++ rm /tmp/tmp.Ze8cn8PBzy /tmp/tmp.0sS8vO3G2E ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 8 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2IHLoV43Cw +++ mktemp ++ local LAST_ERR=/tmp/tmp.F56ELVoiWt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2IHLoV43Cw ++ cat /tmp/tmp.F56ELVoiWt ++ rm /tmp/tmp.2IHLoV43Cw /tmp/tmp.F56ELVoiWt ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set 
+o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 [retries 9-14 elided: the identical User_attributes probe repeated once per second, each pass differing only in the retry counter and the mktemp file names] + [[ 15 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.c6gUxRdoHa +++ mktemp ++ local LAST_ERR=/tmp/tmp.ehXeJR5cAI ++ local exit_status=0 +++ seq 0 2 ++
set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.c6gUxRdoHa ++ cat /tmp/tmp.ehXeJR5cAI ++ rm /tmp/tmp.c6gUxRdoHa /tmp/tmp.ehXeJR5cAI ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 16 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zj3lkq4cRU +++ mktemp ++ local LAST_ERR=/tmp/tmp.NXr3KbqRBz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zj3lkq4cRU ++ cat /tmp/tmp.NXr3KbqRBz ++ rm /tmp/tmp.zj3lkq4cRU /tmp/tmp.NXr3KbqRBz ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace NULL + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NldtJT2ztM +++ mktemp ++ local LAST_ERR=/tmp/tmp.EoudqxXbls ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NldtJT2ztM ++ cat /tmp/tmp.EoudqxXbls ++ rm /tmp/tmp.NldtJT2ztM /tmp/tmp.EoudqxXbls ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4BWSgQWd1z +++ mktemp ++ local LAST_ERR=/tmp/tmp.MeomoJwf5z ++ local exit_status=0 +++ seq 0 2 ++ for i in 
'$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4BWSgQWd1z ++ cat /tmp/tmp.MeomoJwf5z ++ rm /tmp/tmp.4BWSgQWd1z /tmp/tmp.MeomoJwf5z ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.bn0FtqyFIW ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.1iJvBvcEhE +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.bn0FtqyFIW +++++ cat /tmp/tmp.1iJvBvcEhE +++++ rm /tmp/tmp.bn0FtqyFIW /tmp/tmp.1iJvBvcEhE +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.8LzzKsBYH1 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.CEmIcuQA0k +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.8LzzKsBYH1 +++++ cat /tmp/tmp.CEmIcuQA0k +++++ rm /tmp/tmp.8LzzKsBYH1 /tmp/tmp.CEmIcuQA0k +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fMaLtLJjFp +++ mktemp ++ local LAST_ERR=/tmp/tmp.Bs9WdIdkAA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fMaLtLJjFp ++ cat /tmp/tmp.Bs9WdIdkAA ++ rm /tmp/tmp.fMaLtLJjFp /tmp/tmp.Bs9WdIdkAA ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2P21Q7Jakn +++ mktemp ++ local LAST_ERR=/tmp/tmp.exGP2IANwt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2P21Q7Jakn ++ cat /tmp/tmp.exGP2IANwt ++ rm /tmp/tmp.2P21Q7Jakn /tmp/tmp.exGP2IANwt ++ return 0 + 
client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.f8TwUdsyjw/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.f8TwUdsyjw/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.6wodiIfA4K ++ mktemp + local LAST_ERR=/tmp/tmp.pBxeXKBuQX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6wodiIfA4K secret/my-cluster-secrets patched + cat /tmp/tmp.pBxeXKBuQX + rm /tmp/tmp.6wodiIfA4K /tmp/tmp.pBxeXKBuQX + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GGA3PaPyUS +++ mktemp ++ local LAST_ERR=/tmp/tmp.5KzJ8Lx6lX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GGA3PaPyUS ++ cat /tmp/tmp.5KzJ8Lx6lX ++ rm /tmp/tmp.GGA3PaPyUS /tmp/tmp.5KzJ8Lx6lX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
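
The propagation wait that finished above relies on MySQL 8.0 dual-password support: while the operator rotates the monitor password with RETAIN CURRENT PASSWORD, mysql.user.User_attributes holds the retained old password, and the column reads NULL only once the old password has been discarded on the node. A minimal sketch of the probe, reconstructed from the trace (run_mysql stands in for the suite's client-pod wrapper, so treat the body as an approximation):

    # Sketch: succeeds once the user's retained (old) password is gone.
    is_old_password_discarded() {
        local username=$1 uri=$2
        # User_attributes is non-NULL while a retained password still exists.
        run_mysql "SELECT User_attributes FROM mysql.user WHERE user='${username}'" "${uri}" \
            | grep NULL
    }

    retry=0
    until is_old_password_discarded monitor "-h some-name-pxc -uroot -p'test-password'"; do
        echo 'waiting for password propagation'
        sleep 1
        retry=$((retry + 1))
        [[ $retry -ge 240 ]] && exit 1   # same 240 x 1s budget as in the trace
    done
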
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4PQw2xDlhO +++ mktemp ++ local LAST_ERR=/tmp/tmp.MMU8B9e665 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4PQw2xDlhO ++ cat /tmp/tmp.MMU8B9e665 ++ rm /tmp/tmp.4PQw2xDlhO /tmp/tmp.MMU8B9e665 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1xPOsth6Sg +++ mktemp ++ local LAST_ERR=/tmp/tmp.RYObpn19Qj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1xPOsth6Sg ++ cat /tmp/tmp.RYObpn19Qj ++ rm /tmp/tmp.1xPOsth6Sg /tmp/tmp.RYObpn19Qj ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.RKl262HXpe ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Lr7fee0a3Y +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.RKl262HXpe +++++ cat /tmp/tmp.Lr7fee0a3Y +++++ rm /tmp/tmp.RKl262HXpe /tmp/tmp.Lr7fee0a3Y +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.uwBwuTx8iD ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.JqNMXl1Muy +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.uwBwuTx8iD +++++ cat /tmp/tmp.JqNMXl1Muy +++++ rm /tmp/tmp.uwBwuTx8iD /tmp/tmp.JqNMXl1Muy +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.b6NNBbJpfH +++ mktemp ++ local LAST_ERR=/tmp/tmp.Y0antKYeuA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.b6NNBbJpfH ++ cat /tmp/tmp.Y0antKYeuA ++ rm /tmp/tmp.b6NNBbJpfH /tmp/tmp.Y0antKYeuA ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql ]] + 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3M2aMeTSbe +++ mktemp ++ local LAST_ERR=/tmp/tmp.zEnHfJf5pp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3M2aMeTSbe ++ cat /tmp/tmp.zEnHfJf5pp ++ rm /tmp/tmp.3M2aMeTSbe /tmp/tmp.zEnHfJf5pp ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.f8TwUdsyjw/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.f8TwUdsyjw/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.IofDchjT9z ++ mktemp + local LAST_ERR=/tmp/tmp.q7oui8nuDD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.IofDchjT9z perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.q7oui8nuDD + rm /tmp/tmp.IofDchjT9z /tmp/tmp.q7oui8nuDD + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9Kb2GLPtlf +++ mktemp ++ local LAST_ERR=/tmp/tmp.TQxj5SAs6b ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9Kb2GLPtlf ++ cat /tmp/tmp.TQxj5SAs6b ++ rm /tmp/tmp.9Kb2GLPtlf /tmp/tmp.TQxj5SAs6b ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
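
Both secret changes in this phase are plain kubectl patches: patch_secret merges a base64 value into one key of the Secret's .data, and the 'change secret name' step is a single merge patch on spec.secretsName, after which the operator reconciles every system user from my-cluster-secrets-2. Sketches of the two calls (patch_secret's body is reconstructed from the trace, not copied from the suite):

    patch_secret() {   # patch_secret <secret> <key> <base64-encoded value>
        local secret=$1 key=$2 value=$3
        kubectl patch secret "${secret}" -p="{\"data\":{\"${key}\": \"${value}\"}}"
    }
    patch_secret my-cluster-secrets operator "$(echo -n test-password | base64)"

    # Repoint the cluster at another Secret object entirely; the operator
    # then rotates all system users to the passwords it finds there.
    kubectl patch pxc some-name --type merge \
        --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}'
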
. [poll iterations i=0 through i=39 elided: kubectl get pxc some-name -o 'jsonpath={.status.state}' returned "initializing" every 5 seconds, each pass differing only in the loop counter and the mktemp file names]
.+ sleep 5 + [[ 40 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1Zqg8CuBww +++ mktemp ++ local LAST_ERR=/tmp/tmp.Qy10fH2ecR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1Zqg8CuBww ++ cat /tmp/tmp.Qy10fH2ecR ++ rm /tmp/tmp.1Zqg8CuBww /tmp/tmp.Qy10fH2ecR ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.n4QcDQo7Kx +++ mktemp ++ local LAST_ERR=/tmp/tmp.GIE1KOBIP5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.n4QcDQo7Kx ++ cat /tmp/tmp.GIE1KOBIP5 ++ rm /tmp/tmp.n4QcDQo7Kx /tmp/tmp.GIE1KOBIP5 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.6ltQVexMEy ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.MtfrhyadEn +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.6ltQVexMEy +++++ cat /tmp/tmp.MtfrhyadEn +++++ rm /tmp/tmp.6ltQVexMEy /tmp/tmp.MtfrhyadEn +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.6nIC264nC6 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ZV0DVeYGOJ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.6nIC264nC6 +++++ cat /tmp/tmp.ZV0DVeYGOJ +++++ rm /tmp/tmp.6nIC264nC6 /tmp/tmp.ZV0DVeYGOJ +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.v7wy2jGN5F +++ mktemp ++ local LAST_ERR=/tmp/tmp.c3C4IrrIEi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.v7wy2jGN5F ++ cat /tmp/tmp.c3C4IrrIEi ++ rm /tmp/tmp.v7wy2jGN5F /tmp/tmp.c3C4IrrIEi ++ return 0 + [[ 2 == \2 ]] + echo + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.TymNKPrwEh ++ mktemp + local LAST_ERR=/tmp/tmp.RSVfG77vtU + local exit_status=0 ++ 
seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TymNKPrwEh secret/my-cluster-secrets-2 patched + cat /tmp/tmp.RSVfG77vtU + rm /tmp/tmp.TymNKPrwEh /tmp/tmp.RSVfG77vtU + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.58z9xNVyXS +++ mktemp ++ local LAST_ERR=/tmp/tmp.HVEhBAVwCF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.58z9xNVyXS ++ cat /tmp/tmp.HVEhBAVwCF ++ rm /tmp/tmp.58z9xNVyXS /tmp/tmp.HVEhBAVwCF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1UAuQPdE6l +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ybg3w71tVT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1UAuQPdE6l ++ cat /tmp/tmp.Ybg3w71tVT ++ rm /tmp/tmp.1UAuQPdE6l /tmp/tmp.Ybg3w71tVT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
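
wait_cluster_consistency, traced over and over in this log, is a bounded poll on the PXC status subresource: .status.state must be 'ready' and the pxc and proxysql ready counters must equal the requested sizes (3 and 2 here). A condensed sketch matching the counters visible in the trace (i up to max=300 in 5-second steps; the in-tree helper also handles HAProxy-fronted clusters):

    wait_cluster_consistency() {   # <cluster> <pxc size> <proxy size>
        local cluster=$1 size=$2 proxy=$3
        local i=0 max=300
        sleep 7
        until [[ $(kubectl get pxc "$cluster" -o 'jsonpath={.status.state}') == "ready" && \
                 $(kubectl get pxc "$cluster" -o 'jsonpath={.status.pxc.ready}') == "$size" && \
                 $(kubectl get pxc "$cluster" -o 'jsonpath={.status.proxysql.ready}') == "$proxy" ]]; do
            echo -n .
            sleep 5
            i=$((i + 1))
            [[ $i -ge $max ]] && return 1   # give up after ~25 minutes
        done
    }
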
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Z7iVRRrUig +++ mktemp ++ local LAST_ERR=/tmp/tmp.I1AbkDfvn4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Z7iVRRrUig ++ cat /tmp/tmp.I1AbkDfvn4 ++ rm /tmp/tmp.Z7iVRRrUig /tmp/tmp.I1AbkDfvn4 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iyBoNP5Lme +++ mktemp ++ local LAST_ERR=/tmp/tmp.EJl8Lw3faz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iyBoNP5Lme ++ cat /tmp/tmp.EJl8Lw3faz ++ rm /tmp/tmp.iyBoNP5Lme /tmp/tmp.EJl8Lw3faz ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.MZkJuCg9us ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.dVq7GYfsEj +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.MZkJuCg9us +++++ cat /tmp/tmp.dVq7GYfsEj +++++ rm /tmp/tmp.MZkJuCg9us /tmp/tmp.dVq7GYfsEj +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.bPTJ2yzoIK ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.rogDac72Wv +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.bPTJ2yzoIK +++++ cat /tmp/tmp.rogDac72Wv +++++ rm /tmp/tmp.bPTJ2yzoIK /tmp/tmp.rogDac72Wv +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8XvCjiB05l +++ mktemp ++ local LAST_ERR=/tmp/tmp.u7aIQtg3Pk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8XvCjiB05l ++ cat /tmp/tmp.u7aIQtg3Pk ++ rm /tmp/tmp.8XvCjiB05l /tmp/tmp.u7aIQtg3Pk ++ return 0 + [[ 2 == \2 ]] + echo + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql ]] + 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lh9N2t41Oe +++ mktemp ++ local LAST_ERR=/tmp/tmp.wbwnn4chww ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lh9N2t41Oe ++ cat /tmp/tmp.wbwnn4chww ++ rm /tmp/tmp.lh9N2t41Oe /tmp/tmp.wbwnn4chww ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.f8TwUdsyjw/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.f8TwUdsyjw/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.ys65KRotxb +++ mktemp ++ local LAST_ERR=/tmp/tmp.hnpfJQi2fG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ys65KRotxb ++ cat /tmp/tmp.hnpfJQi2fG ++ rm /tmp/tmp.ys65KRotxb /tmp/tmp.hnpfJQi2fG ++ return 0 + newpass='RZ[+FS3l5HW5Iht!rd' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''RZ[+FS3l5HW5Iht!rd'\'';' '-h some-name-pxc -uroot -p'\''RZ[+FS3l5HW5Iht!rd'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''RZ[+FS3l5HW5Iht!rd'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''RZ[+FS3l5HW5Iht!rd'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.T3FktlcVWv +++ mktemp ++ local LAST_ERR=/tmp/tmp.OEX8f7VyUX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.T3FktlcVWv ++ cat /tmp/tmp.OEX8f7VyUX ++ rm /tmp/tmp.T3FktlcVWv /tmp/tmp.OEX8f7VyUX ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace 
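
The sync test above creates the user directly on the PXC nodes and then queries through some-name-proxysql with the new credentials; the SELECT can only succeed once the operator's ProxySQL sync has copied the account into mysql_users, hence the sleep before the comparison. run_mysql itself is assumed here to be a kubectl exec into the long-lived pxc-client pod (a sketch, not the suite's exact wrapper):

    run_mysql() {   # run_mysql <sql> <mysql connection args>
        local command=$1 uri=$2
        local client_pod
        client_pod=$(kubectl get pods --selector=name=pxc-client \
            -o 'jsonpath={.items[].metadata.name}')
        kubectl exec "$client_pod" -- bash -c "printf '%s\n' \"$command\" | mysql -sN $uri"
    }

    # e.g. read through the proxy as the freshly synced user:
    run_mysql 'SHOW TABLES;' "-h some-name-proxysql -utestsync -p\"$newpass\""
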
pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''RZ[+FS3l5HW5Iht!rd'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''RZ[+FS3l5HW5Iht!rd'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''RZ[+FS3l5HW5Iht!rd'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''RZ[+FS3l5HW5Iht!rd'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vxdMZNLaMk +++ mktemp ++ local LAST_ERR=/tmp/tmp.OeUC17Ycv7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vxdMZNLaMk ++ cat /tmp/tmp.OeUC17Ycv7 ++ rm /tmp/tmp.vxdMZNLaMk /tmp/tmp.OeUC17Ycv7 ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.f8TwUdsyjw/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.f8TwUdsyjw/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.zTAlsDYWak +++ mktemp ++ local LAST_ERR=/tmp/tmp.P5oYqdVlbR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zTAlsDYWak ++ cat /tmp/tmp.P5oYqdVlbR ++ rm /tmp/tmp.zTAlsDYWak /tmp/tmp.P5oYqdVlbR ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.W91nbI8jOc ++ mktemp + local LAST_ERR=/tmp/tmp.KlliNHBw5l + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.W91nbI8jOc secret/my-cluster-secrets-2 configured + cat /tmp/tmp.KlliNHBw5l Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
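# [sketch] The mktemp/LAST_OUT/LAST_ERR/"seq 0 2" pattern that dominates this log
# is the suite's kubectl_bin retry wrapper. A minimal reconstruction from the
# trace; the back-off between failed attempts is an assumption (only successful
# first attempts appear here), and the canonical definition lives in
# e2e-tests/functions:
kubectl_bin() {
	local LAST_OUT LAST_ERR exit_status=0 i
	LAST_OUT=$(mktemp)
	LAST_ERR=$(mktemp)
	for i in $(seq 0 2); do                   # up to three attempts
		set +e                            # tolerate a failing attempt
		kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
		exit_status=$?
		set -e
		if [ "$exit_status" != 0 ]; then
			sleep 1                   # assumed back-off, not visible in the trace
		else
			break                     # first success wins
		fi
	done
	cat "$LAST_OUT"                           # replay captured output into the test log
	cat "$LAST_ERR"
	rm "$LAST_OUT" "$LAST_ERR"
	return "$exit_status"
}
# The capture-then-cat indirection lets callers both assert on the output and
# still see it inline here, while the retries mask transient API-server errors.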
+ rm /tmp/tmp.W91nbI8jOc /tmp/tmp.KlliNHBw5l + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fKN2CBzQ9Z +++ mktemp ++ local LAST_ERR=/tmp/tmp.YpiunvfgXY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fKN2CBzQ9Z ++ cat /tmp/tmp.YpiunvfgXY ++ rm /tmp/tmp.fKN2CBzQ9Z /tmp/tmp.YpiunvfgXY ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.f8TwUdsyjw/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.f8TwUdsyjw/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + local LAST_OUT=/tmp/tmp.RKAnXEF4DG + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-30642~ + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2193-1eb37b20#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' ++ mktemp + local LAST_ERR=/tmp/tmp.oMXHdyngRx + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RKAnXEF4DG perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.oMXHdyngRx + rm /tmp/tmp.RKAnXEF4DG /tmp/tmp.oMXHdyngRx + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kOEVOYcITK +++ mktemp ++ local LAST_ERR=/tmp/tmp.IjnqlosjsQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kOEVOYcITK ++ cat /tmp/tmp.IjnqlosjsQ ++ rm /tmp/tmp.kOEVOYcITK /tmp/tmp.IjnqlosjsQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
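# [sketch] apply_config/cat_config, condensed from the pipeline traced above: the
# fixture CR is streamed through a sed chain that pins the apiVersion and rewrites
# every image to the build under test before piping into kubectl apply. The
# $IMAGE_* variable names here are illustrative; the sed expressions are exactly
# the ones in the trace:
cat "$test_dir/conf/some-name.yml" \
	| /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
	| /usr/bin/sed -e "s#image:.*\/percona-xtradb-cluster:.*\$#image: $IMAGE_PXC#" \
	| /usr/bin/sed -e "s#image:.*-proxysql\$#image: $IMAGE_PROXY#" \
	| /usr/bin/sed -e "s#image:.*-haproxy\$#image: $IMAGE_HAPROXY#" \
	| /usr/bin/sed -e "s#image:.*-init\$#image: $IMAGE_OPERATOR#" \
	| kubectl_bin apply -f -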
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.I9RTXzLyoO +++ mktemp ++ local LAST_ERR=/tmp/tmp.abhhG22JlW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.I9RTXzLyoO ++ cat /tmp/tmp.abhhG22JlW ++ rm /tmp/tmp.I9RTXzLyoO /tmp/tmp.abhhG22JlW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UghhH09G5P +++ mktemp ++ local LAST_ERR=/tmp/tmp.rTKXacqw0V ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UghhH09G5P ++ cat /tmp/tmp.rTKXacqw0V ++ rm /tmp/tmp.UghhH09G5P /tmp/tmp.rTKXacqw0V ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TpOVZ1WNlH +++ mktemp ++ local LAST_ERR=/tmp/tmp.c0p1jUwfAE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TpOVZ1WNlH ++ cat /tmp/tmp.c0p1jUwfAE ++ rm /tmp/tmp.TpOVZ1WNlH /tmp/tmp.c0p1jUwfAE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LLuc5vOGA3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.rciyD7E0am ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LLuc5vOGA3 ++ cat /tmp/tmp.rciyD7E0am ++ rm /tmp/tmp.LLuc5vOGA3 /tmp/tmp.rciyD7E0am ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.usyouJcM30 +++ mktemp ++ local LAST_ERR=/tmp/tmp.brbF6Dwhr0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.usyouJcM30 ++ cat /tmp/tmp.brbF6Dwhr0 ++ rm /tmp/tmp.usyouJcM30 /tmp/tmp.brbF6Dwhr0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rlhE8aaWkI +++ mktemp ++ local LAST_ERR=/tmp/tmp.dYhr62LK6i ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rlhE8aaWkI ++ cat /tmp/tmp.dYhr62LK6i ++ rm /tmp/tmp.rlhE8aaWkI /tmp/tmp.dYhr62LK6i ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
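# [sketch] This is wait_cluster_consistency: it re-polls .status.state every 5 s,
# so successive iterations differ only in the retry counter and mktemp paths
# until the state flips from "initializing" to "ready" at i=54 below. The loop,
# reconstructed from the trace (the real helper in e2e-tests/functions also
# verifies the proxy replica count, as the trace below shows):
wait_cluster_consistency() {
	local cluster_name=$1 cluster_size=$2 proxy_size=${3:-$2}
	local i=0 max=300
	sleep 7
	echo -n "waiting for pxc/$cluster_name to be ready"
	until [[ $(kubectl_bin get pxc "$cluster_name" -o 'jsonpath={.status.state}') == "ready" ]]; do
		echo -n .
		sleep 5
		[[ $i -ge $max ]] && return 1     # roughly 25 min budget before giving up (assumed)
		let i+=1
	done
	# once ready, the pxc and proxy ready counts must match the expected sizes
	[[ $(kubectl_bin get pxc "$cluster_name" -o 'jsonpath={.status.pxc.ready}') == "$cluster_size" ]]
	[[ $(kubectl_bin get pxc "$cluster_name" -o "jsonpath={.status.$(get_proxy_engine "$cluster_name").ready}") == "$proxy_size" ]]
}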
.+ sleep 5 + [[ 54 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jaWmDpv9xx +++ mktemp ++ local LAST_ERR=/tmp/tmp.fUGZWRoQ2w ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jaWmDpv9xx ++ cat /tmp/tmp.fUGZWRoQ2w ++ rm /tmp/tmp.jaWmDpv9xx /tmp/tmp.fUGZWRoQ2w ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ncXUv0FmHd +++ mktemp ++ local LAST_ERR=/tmp/tmp.3RHA0u5zgh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ncXUv0FmHd ++ cat /tmp/tmp.3RHA0u5zgh ++ rm /tmp/tmp.ncXUv0FmHd /tmp/tmp.3RHA0u5zgh ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.sMF2ofMAc2 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Q1WZoGxvoz +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.sMF2ofMAc2 +++++ cat /tmp/tmp.Q1WZoGxvoz +++++ rm /tmp/tmp.sMF2ofMAc2 /tmp/tmp.Q1WZoGxvoz +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.stxdPA91xC +++ mktemp ++ local LAST_ERR=/tmp/tmp.xL5am2CMWQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.stxdPA91xC ++ cat /tmp/tmp.xL5am2CMWQ ++ rm /tmp/tmp.stxdPA91xC /tmp/tmp.xL5am2CMWQ ++ return 0 + [[ 3 == \3 ]] + echo + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dLDnLSg2GP +++ mktemp ++ local LAST_ERR=/tmp/tmp.oroFHP79J2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dLDnLSg2GP ++ cat /tmp/tmp.oroFHP79J2 ++ rm /tmp/tmp.dLDnLSg2GP /tmp/tmp.oroFHP79J2 ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.0Nf9qn5NEo ++ mktemp + local LAST_ERR=/tmp/tmp.mUTw4FIeDC + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 
+ set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0Nf9qn5NEo secret/my-cluster-secrets patched + cat /tmp/tmp.mUTw4FIeDC + rm /tmp/tmp.0Nf9qn5NEo /tmp/tmp.mUTw4FIeDC + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OloY79MbeD +++ mktemp ++ local LAST_ERR=/tmp/tmp.hg4tj9nKUA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OloY79MbeD ++ cat /tmp/tmp.hg4tj9nKUA ++ rm /tmp/tmp.OloY79MbeD /tmp/tmp.hg4tj9nKUA ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bHPO17suid +++ mktemp ++ local LAST_ERR=/tmp/tmp.e0btsLa2sx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bHPO17suid ++ cat /tmp/tmp.e0btsLa2sx ++ rm /tmp/tmp.bHPO17suid /tmp/tmp.e0btsLa2sx ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.AWfJBCeFmr ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.EAgVZ4giDk +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.AWfJBCeFmr +++++ cat /tmp/tmp.EAgVZ4giDk +++++ rm /tmp/tmp.AWfJBCeFmr /tmp/tmp.EAgVZ4giDk +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.b99DNs86za +++ mktemp ++ local LAST_ERR=/tmp/tmp.qab0YnwpDp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.b99DNs86za ++ cat /tmp/tmp.qab0YnwpDp ++ rm /tmp/tmp.b99DNs86za /tmp/tmp.qab0YnwpDp ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-3-80.sql ]] + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lndkICPrGd +++ mktemp ++ local LAST_ERR=/tmp/tmp.2yqIAh2AFx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lndkICPrGd ++ cat /tmp/tmp.2yqIAh2AFx ++ rm /tmp/tmp.lndkICPrGd /tmp/tmp.2yqIAh2AFx ++ return 0 + client_pod=pxc-client-59944c5bbf-55x4s + wait_pod pxc-client-59944c5bbf-55x4s + local pod=pxc-client-59944c5bbf-55x4s + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-55x4s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-55x4s condition met waiting for pod/pxc-client-59944c5bbf-55x4s to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.f8TwUdsyjw/select-3.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2193/e2e-tests/users/compare/select-3.sql /tmp/tmp.f8TwUdsyjw/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.En6Oj9trig +++ mktemp ++ local LAST_ERR=/tmp/tmp.F8r4OnsSxX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.En6Oj9trig ++ cat /tmp/tmp.F8r4OnsSxX ++ rm /tmp/tmp.En6Oj9trig /tmp/tmp.F8r4OnsSxX ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + destroy users-30642 + local namespace=users-30642 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' ++ get_operator_pod + grep -v 'the object has been modified' ++ local label_prefix=app.kubernetes.io/ + grep -v 'get backup status: Job.batch' + sort -u + grep -v level=info + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + tee /tmp/tmp.f8TwUdsyjw/operator.log +++ grep -c percona-xtradb-cluster-operator +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.AHdJ46ampR +++ mktemp ++ local LAST_ERR=/tmp/tmp.7cAh5T8xa4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ 
exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AHdJ46ampR ++ cat /tmp/tmp.7cAh5T8xa4 ++ rm /tmp/tmp.AHdJ46ampR /tmp/tmp.7cAh5T8xa4 ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-75d958d548-zndnv ++ mktemp + local LAST_OUT=/tmp/tmp.T8OMP1nKqa ++ mktemp + local LAST_ERR=/tmp/tmp.RuXe3mvmYr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-75d958d548-zndnv + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.T8OMP1nKqa + cat /tmp/tmp.RuXe3mvmYr + rm /tmp/tmp.T8OMP1nKqa /tmp/tmp.RuXe3mvmYr + return 0 2025-11-27T11:58:23.056Z INFO setup Manager starting up {"gitCommit": "1eb37b20ea39a043846a217c61acb7bcd9d0d5c9", "gitBranch": "PR-2193-1eb37b20", "buildTime": "2025-11-27T09:39:01Z", "goVersion": "go1.25.4", "os": "linux", "arch": "amd64"} 2025-11-27T11:58:23.056Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.13-gke.1454000"} 2025-11-27T11:58:23.060Z INFO setup Registering Components. 2025-11-27T11:58:23.521Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-11-27T11:58:23.521Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-11-27T11:58:23.521Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-11-27T11:58:23.521Z INFO controller-runtime.metrics Starting metrics server 2025-11-27T11:58:23.521Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-11-27T11:58:23.521Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-11-27T11:58:23.521Z INFO controller-runtime.webhook Starting webhook server 2025-11-27T11:58:23.521Z INFO setup Starting the Cmd. 2025-11-27T11:58:23.521Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-11-27T11:58:23.622Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 
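# [sketch] This operator log dump was captured by the destroy step traced above:
# find the operator pod by label, then strip noise and de-duplicate before saving
# for post-mortem inspection. Reconstructed from the trace ($tmpdir stands for
# this run's mktemp dir, /tmp/tmp.f8TwUdsyjw):
ns=pxc-operator
pod=$(kubectl_bin get pods -n "$ns" \
	--selector=app.kubernetes.io/name=percona-xtradb-cluster-operator \
	-o 'jsonpath={.items[].metadata.name}')
kubectl_bin logs -n "$ns" "$pod" \
	| grep -v 'get backup status: Job.batch' \
	| grep -v 'the object has been modified' \
	| grep -v level=info \
	| /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
	| sort -u \
	| tee "$tmpdir/operator.log"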
2025-11-27T11:58:23.650Z DEBUG events percona-xtradb-cluster-operator-75d958d548-zndnv_c6a549e8-17d8-4e9b-9be5-0f7aa1b27ae5 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"e118813d-45ec-4541-9afe-389453043c58","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1764244703644095009"}, "reason": "LeaderElection"} 2025-11-27T11:58:23.650Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.Secret"} 2025-11-27T11:58:23.650Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2025-11-27T11:58:23.651Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-11-27T11:58:23.651Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-11-27T11:58:23.651Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-11-27T11:58:23.751Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2025-11-27T11:58:23.751Z INFO Starting Controller {"controller": "pxc-controller"} 2025-11-27T11:58:23.751Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2025-11-27T11:58:23.751Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2025-11-27T11:58:23.751Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2025-11-27T11:58:23.751Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2025-11-27T11:58:58.670Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "c68cd045-a54c-4cd2-8666-cb317a5986ed", "version": "1.19.0"} 2025-11-27T11:58:58.801Z INFO User secrets updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "c68cd045-a54c-4cd2-8666-cb317a5986ed", "secrets": "my-cluster-secrets"} 2025-11-27T11:58:59.021Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "c68cd045-a54c-4cd2-8666-cb317a5986ed", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-11-27T11:58:59.138Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "c68cd045-a54c-4cd2-8666-cb317a5986ed", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-27T11:58:59.186Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "c68cd045-a54c-4cd2-8666-cb317a5986ed", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-27T11:58:59.285Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "c68cd045-a54c-4cd2-8666-cb317a5986ed", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-27T11:58:59.332Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "c68cd045-a54c-4cd2-8666-cb317a5986ed", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-27T11:58:59.426Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": 
"c68cd045-a54c-4cd2-8666-cb317a5986ed", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-27T11:58:59.579Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "c68cd045-a54c-4cd2-8666-cb317a5986ed", "object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-27T11:59:00.529Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "5e845683-70cc-447a-8757-5282bda267b6", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-11-27T11:59:00.602Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "5e845683-70cc-447a-8757-5282bda267b6", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-11-27T12:00:17.338Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "30a54088-c765-4658-9347-877068fec30d", "user": "operator"} 2025-11-27T12:00:17.372Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "30a54088-c765-4658-9347-877068fec30d", "user": "monitor"} 2025-11-27T12:00:17.420Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "30a54088-c765-4658-9347-877068fec30d"} 2025-11-27T12:00:17.469Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "30a54088-c765-4658-9347-877068fec30d"} 2025-11-27T12:00:17.503Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "30a54088-c765-4658-9347-877068fec30d", "user": "xtrabackup"} 2025-11-27T12:00:17.545Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "30a54088-c765-4658-9347-877068fec30d"} 2025-11-27T12:00:17.577Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "30a54088-c765-4658-9347-877068fec30d", "user": "replication"} 2025-11-27T12:00:17.585Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "30a54088-c765-4658-9347-877068fec30d", "err": "get primary pxc pod: not found"} 2025-11-27T12:00:22.287Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "e12d7b1e-b953-4f3d-9d32-be2d4411f5c1", "err": "get primary pxc pod: not found"} 2025-11-27T12:00:27.441Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "39fa21e9-d96d-42d5-997e-929d2979f36f", "err": "get primary pxc pod: not found"} 2025-11-27T12:00:32.643Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "a3a0d68d-2eb1-4be7-bc78-d221bf44e01d", "err": "get primary pxc pod: not found"} 2025-11-27T12:02:43.807Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", 
"reconcileID": "d2d3aec1-5787-42ca-96fb-6adfe7a5e1da", "user": "root"} 2025-11-27T12:02:43.962Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "d2d3aec1-5787-42ca-96fb-6adfe7a5e1da", "new version": "8.0.43-34.1"} 2025-11-27T12:02:45.521Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "d2d3aec1-5787-42ca-96fb-6adfe7a5e1da"} 2025-11-27T12:02:50.660Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "e4712e50-d92a-4861-8681-0384ccff6500"} 2025-11-27T12:02:55.952Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "5ad8951f-3d55-470b-a46d-9501b556a317"} 2025-11-27T12:03:01.320Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "6e3db760-1e22-4345-87e9-28ca22b67bd5"} 2025-11-27T12:03:06.646Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "66e1821c-e6f1-479f-b882-41bc378fee95"} 2025-11-27T12:03:11.865Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "9ce9533a-1f45-4230-b2be-d5b14521329a"} 2025-11-27T12:03:17.630Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "72df6727-bb20-4865-b812-ca424bc19f21"} 2025-11-27T12:03:22.955Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "74f763a4-4cc7-4b5e-adb0-94c38fe64865"} 2025-11-27T12:03:28.159Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "55ed4d85-5a79-4b53-ad16-64f481e6c6c0"} 2025-11-27T12:03:33.557Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "4cabd81c-d97a-4739-99ce-15dd377248c9"} 2025-11-27T12:03:39.052Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "47777275-00dc-4cf7-901d-10e11ac717cf"} 2025-11-27T12:03:44.162Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "5c5dc163-59d7-484e-9817-0f05f1698684"} 2025-11-27T12:03:49.457Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "3de619d7-6452-48f1-b8eb-cd4bd8b50172"} 2025-11-27T12:03:54.530Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "05e70e7e-6a4d-431f-9581-26c41642c466"} 2025-11-27T12:04:00.254Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "9326c0fe-f51f-49ed-850f-3f8b555cf7e8"} 2025-11-27T12:04:05.421Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "294398c6-7b17-4679-a01a-a9a42d1979ea"} 2025-11-27T12:04:06.917Z INFO Password changed, updating user 
{"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "391d10c6-5745-49f6-960d-5f3b0cd08f06", "user": "root"} 2025-11-27T12:04:06.939Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "391d10c6-5745-49f6-960d-5f3b0cd08f06", "user": "root"} 2025-11-27T12:04:06.958Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "391d10c6-5745-49f6-960d-5f3b0cd08f06", "secret": "some-name-mysql-init", "user": "root"} 2025-11-27T12:04:09.458Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "391d10c6-5745-49f6-960d-5f3b0cd08f06"} 2025-11-27T12:04:09.493Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "391d10c6-5745-49f6-960d-5f3b0cd08f06", "user": "root"} 2025-11-27T12:04:09.512Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "391d10c6-5745-49f6-960d-5f3b0cd08f06", "user": "root"} 2025-11-27T12:04:11.242Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "391d10c6-5745-49f6-960d-5f3b0cd08f06"} 2025-11-27T12:04:17.320Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "2dc0ae48-f095-4e2e-9c54-c4c77c815efd"} 2025-11-27T12:04:22.926Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "6076f79b-0299-4ba5-a2bb-63c0372216c8"} 2025-11-27T12:04:26.218Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "dc36f071-d118-4afb-a153-a18e32e207c6", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-27T12:04:26.290Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "dc36f071-d118-4afb-a153-a18e32e207c6", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-27T12:04:29.108Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "dc36f071-d118-4afb-a153-a18e32e207c6", "error": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-27T12:04:47.990Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": 
"c4cbf419-502e-466a-a55d-f3d4d3eafcd5", "err": "get primary pxc pod: not found"} 2025-11-27T12:04:48.750Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "0e0f5cc9-e87a-44bc-a916-f31a5ed7d220", "err": "get primary pxc pod: not found"} 2025-11-27T12:04:52.049Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "2e9c2c93-f8f1-46d2-9afa-fdaf10508f8f", "user": "proxyadmin"} 2025-11-27T12:04:52.049Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "2e9c2c93-f8f1-46d2-9afa-fdaf10508f8f", "user": "proxyadmin"} 2025-11-27T12:04:52.077Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "2e9c2c93-f8f1-46d2-9afa-fdaf10508f8f", "user": "proxyadmin"} 2025-11-27T12:04:52.102Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "2e9c2c93-f8f1-46d2-9afa-fdaf10508f8f", "user": "proxyadmin"} 2025-11-27T12:04:52.102Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "2e9c2c93-f8f1-46d2-9afa-fdaf10508f8f", "last-applied-secret": "38e3fcecd16efc6d079552900b5a55bf0fe89d5ae9298e755e339635d6711157"} 2025-11-27T12:04:52.106Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "2e9c2c93-f8f1-46d2-9afa-fdaf10508f8f", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-27T12:04:52.675Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "0e0f5cc9-e87a-44bc-a916-f31a5ed7d220", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-27T12:05:34.623Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "a9bb6b20-0a41-47a8-898b-56ffb4361da7"} 2025-11-27T12:05:39.050Z 
2025-11-27T12:05:39.119Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "6c3595c6-2245-4304-aac2-651bcf7a75c7", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-27T12:05:40.315Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "004ea1f8-3b53-4649-903e-d47781612b46", "error": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-2\" not found / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local:3306) to ProxySQL\n / ", "errorVerbose": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-2\" not found / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local:3306) to ProxySQL\n / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-27T12:05:46.166Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "d4773906-3c96-4865-a152-d9c734b3f73a", "user": "xtrabackup"}
2025-11-27T12:05:46.182Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "d4773906-3c96-4865-a152-d9c734b3f73a", "user": "xtrabackup"}
2025-11-27T12:05:46.257Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "d4773906-3c96-4865-a152-d9c734b3f73a", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2025-11-27T12:05:46.287Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "d4773906-3c96-4865-a152-d9c734b3f73a", "user": "xtrabackup"}
2025-11-27T12:05:46.300Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "d4773906-3c96-4865-a152-d9c734b3f73a", "user": "xtrabackup"}
2025-11-27T12:05:46.306Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "d4773906-3c96-4865-a152-d9c734b3f73a", "last-applied-secret": "a129018960796adccb0f3236d814f7f227be0dd2c53ee1ec7eaf54ac51dbbd14"}
2025-11-27T12:05:46.309Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "d4773906-3c96-4865-a152-d9c734b3f73a", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-27T12:05:48.997Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "d4773906-3c96-4865-a152-d9c734b3f73a"}
"namespace": "users-30642", "name": "some-name", "reconcileID": "d4773906-3c96-4865-a152-d9c734b3f73a"} 2025-11-27T12:07:37.148Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "e8b57366-861b-4d00-aff2-06c4602c1b96", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"} 2025-11-27T12:07:42.419Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "fc09ab4e-9ad3-4e62-b9e6-01893ed5900c", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"} 2025-11-27T12:07:47.555Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "4ca700e1-df03-467f-a1c9-8a7384adc37b", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"} 2025-11-27T12:07:52.714Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "e14cbd8a-75f2-471e-b7a9-082cd82f9319", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"} 2025-11-27T12:07:57.872Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "2be27673-d39d-45d9-90bd-b406472623ae", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"} 2025-11-27T12:08:03.014Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "ddb86a4c-f8b2-4e94-80fb-4f17e4e01150", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"} 2025-11-27T12:08:08.185Z INFO Unable to find primary pod for replication. 
2025-11-27T12:08:17.190Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "f3db546c-d97a-448b-ac62-0af5e829aafd"}
2025-11-27T12:08:20.662Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "cb3778c6-cd2b-455b-a245-a3688fc31a2c", "user": "monitor"}
2025-11-27T12:08:20.677Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "cb3778c6-cd2b-455b-a245-a3688fc31a2c", "user": "monitor"}
2025-11-27T12:08:20.699Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "cb3778c6-cd2b-455b-a245-a3688fc31a2c", "secret": "some-name-mysql-init", "user": "monitor"}
2025-11-27T12:08:20.728Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "cb3778c6-cd2b-455b-a245-a3688fc31a2c", "user": "monitor"}
2025-11-27T12:08:20.752Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "cb3778c6-cd2b-455b-a245-a3688fc31a2c", "user": "monitor"}
2025-11-27T12:08:20.849Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "cb3778c6-cd2b-455b-a245-a3688fc31a2c", "last-applied-secret": "7e7c15af99cd23e9bb798619a267f5284f598a28f2fa29f0e9f3dca5edac8552"}
2025-11-27T12:08:20.853Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "cb3778c6-cd2b-455b-a245-a3688fc31a2c", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-27T12:08:22.155Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "45619be5-acee-4608-b059-3f5aa6be78be", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local:3306) to ProxySQL\n / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local:3306) to ProxySQL\n / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-27T12:08:56.641Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "cf520ac8-90e6-4d25-bc80-0d6d283cff27", "user": "monitor"}
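[note] "Password updated but old one not discarded" is the first half of the operator's two-phase monitor rotation: both passwords stay valid until every consumer picks up the new one, and only then does "Old password discarded" follow (seen below). The in-flight value can be inspected in the operator's internal secret, assuming the usual internal-<cluster> naming:
  kubectl -n users-30642 get secret internal-some-name -o jsonpath='{.data.monitor}' | base64 -d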
"monitor"} 2025-11-27T12:08:59.382Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "cf520ac8-90e6-4d25-bc80-0d6d283cff27"} 2025-11-27T12:09:13.232Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "05e9dd22-21d1-44dd-842b-4c8dcc45d392", "user": "monitor"} 2025-11-27T12:09:15.513Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "05e9dd22-21d1-44dd-842b-4c8dcc45d392"} 2025-11-27T12:09:18.232Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "c464bb6a-a71a-461e-8a60-bc3a2aa9d52f", "user": "monitor"} 2025-11-27T12:09:20.096Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "c464bb6a-a71a-461e-8a60-bc3a2aa9d52f"} 2025-11-27T12:09:23.814Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "f86e748e-1c23-471d-9cf0-ed775c17ecd0", "user": "monitor"} 2025-11-27T12:09:25.984Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "f86e748e-1c23-471d-9cf0-ed775c17ecd0"} 2025-11-27T12:09:29.455Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "e66b5ceb-33e2-4e42-b39e-20e937dfb5c4", "user": "monitor"} 2025-11-27T12:09:32.281Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "e66b5ceb-33e2-4e42-b39e-20e937dfb5c4"} 2025-11-27T12:09:35.550Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "4b77670a-d6d1-48ca-9b23-bb82d5e7f9db", "user": "monitor"} 2025-11-27T12:09:37.489Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "4b77670a-d6d1-48ca-9b23-bb82d5e7f9db"} 2025-11-27T12:09:41.131Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "86b4a565-3547-4be8-92d6-d3737991e950", "user": "monitor"} 2025-11-27T12:09:41.994Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "86b4a565-3547-4be8-92d6-d3737991e950", "user": "monitor"} 2025-11-27T12:09:42.008Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "86b4a565-3547-4be8-92d6-d3737991e950", "last-applied-secret": "7e7c15af99cd23e9bb798619a267f5284f598a28f2fa29f0e9f3dca5edac8552"} 2025-11-27T12:09:44.065Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "86b4a565-3547-4be8-92d6-d3737991e950"} 2025-11-27T12:09:49.013Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "220f476a-d77d-4b68-89f4-ab78c53f701b"} 2025-11-27T12:09:54.417Z DEBUG PXC users synced with ProxySQL {"controller": 
"pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "d467f46e-dc74-436d-a70b-d92005bc35b1"} 2025-11-27T12:09:59.386Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "7176aef8-cd93-4853-a943-277f107c911a"} 2025-11-27T12:10:05.216Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "399858e4-9437-4edb-a7e7-cc8cfed3e60a"} 2025-11-27T12:10:09.266Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "c36e31b7-f15d-4589-98dc-72fb2bfd1304", "user": "operator"} 2025-11-27T12:10:09.280Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "c36e31b7-f15d-4589-98dc-72fb2bfd1304", "user": "operator"} 2025-11-27T12:10:09.314Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "c36e31b7-f15d-4589-98dc-72fb2bfd1304", "secret": "some-name-mysql-init", "user": "operator"} 2025-11-27T12:10:09.339Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "c36e31b7-f15d-4589-98dc-72fb2bfd1304", "user": "operator"} 2025-11-27T12:10:09.353Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "c36e31b7-f15d-4589-98dc-72fb2bfd1304", "user": "operator"} 2025-11-27T12:10:09.374Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "c36e31b7-f15d-4589-98dc-72fb2bfd1304", "last-applied-secret": "d4dc62bc846d16b0766669d4af98c46dfe504b903b858c3de5a69955c46a659b"} 2025-11-27T12:10:09.378Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "c36e31b7-f15d-4589-98dc-72fb2bfd1304", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-27T12:10:11.293Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "59bbc256-dce6-42f6-bb45-467f9566c602", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-27T12:10:37.255Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "d8e013c6-9d9a-4246-89b3-7d6e158668a2"} 2025-11-27T12:10:42.414Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": 
"120da39b-84c9-4d73-99f6-4570f8e49e9b"} 2025-11-27T12:10:47.873Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "6aac3164-2d19-4fef-bf7e-305b9e4d57bc"} 2025-11-27T12:10:52.860Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "b779679a-79dc-4885-a76e-5cd20779fada"} 2025-11-27T12:10:54.287Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "secrets": "my-cluster-secrets-2"} 2025-11-27T12:10:54.295Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "user": "root"} 2025-11-27T12:10:54.317Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "user": "root"} 2025-11-27T12:10:54.339Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "secret": "some-name-mysql-init", "user": "root"} 2025-11-27T12:10:56.857Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f"} 2025-11-27T12:10:56.888Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "user": "root"} 2025-11-27T12:10:56.909Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "user": "root"} 2025-11-27T12:10:56.916Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "user": "operator"} 2025-11-27T12:10:56.930Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "user": "operator"} 2025-11-27T12:10:57.003Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "secret": "some-name-mysql-init", "user": "operator"} 2025-11-27T12:10:57.069Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "user": "operator"} 2025-11-27T12:10:57.092Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "user": "operator"} 2025-11-27T12:10:57.101Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "user": "monitor"} 2025-11-27T12:10:57.118Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "user": "monitor"} 2025-11-27T12:10:57.136Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": 
"users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-27T12:10:57.153Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "user": "monitor"} 2025-11-27T12:10:57.174Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "user": "monitor"} 2025-11-27T12:10:57.459Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "user": "xtrabackup"} 2025-11-27T12:10:57.477Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "user": "xtrabackup"} 2025-11-27T12:10:57.495Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-11-27T12:10:57.525Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "user": "xtrabackup"} 2025-11-27T12:10:57.538Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "user": "xtrabackup"} 2025-11-27T12:10:57.544Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "user": "replication"} 2025-11-27T12:10:57.556Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "user": "replication"} 2025-11-27T12:10:57.594Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "secret": "some-name-mysql-init", "user": "replication"} 2025-11-27T12:10:57.613Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "user": "replication"} 2025-11-27T12:10:57.623Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "user": "replication"} 2025-11-27T12:10:57.623Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "user": "proxyadmin"} 2025-11-27T12:10:57.640Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "user": "proxyadmin"} 2025-11-27T12:10:57.662Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "user": "proxyadmin"} 2025-11-27T12:10:57.662Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", 
"reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "last-applied-secret": "9bbe2dfbce805028700b728051632816a6ff975eed0d04314c07f20a7d43b40e"} 2025-11-27T12:10:57.662Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "last-applied-secret": "9bbe2dfbce805028700b728051632816a6ff975eed0d04314c07f20a7d43b40e"} 2025-11-27T12:10:57.664Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-27T12:10:57.725Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-27T12:10:59.912Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1fe4e373-2b31-4caa-b4ec-8f2dc724059f", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
2025-11-27T12:11:51.793Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "b704f604-1ae3-4d31-871a-09d50107d324", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-30642 on 34.118.224.10:53: no such host"}
2025-11-27T12:11:56.826Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "51296ef5-20d5-4cc0-aa54-b803b84cc887", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-30642 on 34.118.224.10:53: no such host"}
2025-11-27T12:12:02.535Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "d6213027-1370-45c6-9928-c21ab20b96f3", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-30642 on 34.118.224.10:53: no such host"}
2025-11-27T12:12:49.466Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "7dca8cbc-903e-4175-8bd6-a6b326ed8f1c", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.161.153.56:33062: connect: connection refused"}
2025-11-27T12:12:54.631Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "5bcb7ef6-e88f-4dd7-a9fd-a340d9117c91", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:12:59.780Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "7618e8d0-9ff0-44e0-b780-8f085de2f089", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
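[note] The "no such host" lookups above are DNS fallout from the rolling restart: some-name-pxc-1 briefly has no endpoint behind the headless service, so the readonly check cannot dial it; the "connection refused" on port 33062 is the same restart seen from pod 0. Resolution can be verified from a throwaway pod:
  kubectl -n users-30642 run -it --rm dnscheck --image=busybox --restart=Never -- nslookup some-name-pxc-1.some-name-pxc.users-30642.svc.cluster.local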
2025-11-27T12:13:04.924Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "18397074-a349-4079-9ff3-9fdf67e24833", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:13:10.078Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "133628b7-9a96-4138-8c9d-e330a4a3cb38", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:13:15.531Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "54ce540d-bd83-4fe2-823d-8bc9046148e8", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:13:21.271Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "132e7270-63b5-4772-8267-000becdf3cf1", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:13:26.420Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "3c4db1cf-43d7-49c4-9847-0a59a8d30bad", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:13:31.566Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "855c509a-7301-4245-99a7-656e6ed9d6c5", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:13:36.704Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "c92b2015-4e64-42e7-bcc0-13400c557e35", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:13:41.835Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "00930915-f712-4752-b662-e89c02553385", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:13:47.004Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "4c9a471f-3938-43b4-a342-69899085635c", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:13:52.140Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "c209077c-0113-4065-a26b-71e27adc21ef", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:13:57.270Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "4dd1016f-503c-40f6-84fe-b3e4fa55b0da", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:14:02.419Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "bc64beaf-f130-4d55-95d8-97b2f6ff2c36", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:14:07.573Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "35926bf1-2ddb-4015-aea7-ad682319e5b1", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:14:12.735Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1521358c-fc30-495c-999c-7a7ab501ae7a", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:14:17.881Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "db161f9a-a004-4d81-981c-3db9ae91a1c0", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:14:23.032Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "694ae43f-918e-47ab-be83-fc95152cd134", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:14:23.260Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "e55003a8-f6fb-4e46-9382-07047f0bd09b", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:14:28.245Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "d7147539-02c5-4f3d-8c1a-a97b0f36803d", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:14:33.410Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "f8482b59-ba18-4359-a941-d6eb5f55dd98", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:14:38.560Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "00cbf48b-1d7c-4e1a-8812-dd50daf65b10", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:14:38.711Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "e21af54f-48b1-4985-9e99-2bb5530e3500", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:14:43.695Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "d164baff-6997-458b-bbd8-7cd6c4adeb5f", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:14:48.837Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "82291fc1-a337-4d94-b033-c14b28345623", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:14:53.969Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "7854d429-4aec-4e04-88f7-441f7f0f3d08", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:14:59.131Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "2b6d13b9-1290-4eed-a269-4e2cc66344b2", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:15:04.262Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "47681559-5b66-4d28-a4bd-576f068995fe", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:15:09.404Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "61a367c4-a9e4-4b0e-8b0b-a1cba839c3ca", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:15:14.538Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "10f5fcda-abae-4987-b769-034ae1c55404", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:15:19.670Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "939fd826-6a3f-4085-8445-8136242f4d30", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:15:24.864Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "ead33f80-cacf-4f62-9046-e529e0cec496", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
2025-11-27T12:15:30.213Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "f9136f38-f1ea-4713-9dd2-e0c962f87bbe", "primary name": "some-name-pxc-0.some-name-pxc.users-30642.svc.cluster.local"}
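[note] This "Unable to find primary pod" run lasts from 12:12:54 to 12:15:30, the window in which some-name-pxc-0, the previous writer, is down for restart; the operator cannot match the primary name it obtained against any live pod. The same view is available from ProxySQL's admin interface (credentials as in the earlier probe):
  kubectl -n users-30642 exec some-name-proxysql-0 -c proxysql -- mysql -h127.0.0.1 -P6032 -uproxyadmin -p"$PROXY_PASS" -e 'SELECT hostgroup_id, hostname, status FROM runtime_mysql_servers'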
2025-11-27T12:15:36.886Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "36338080-f953-4cce-948d-48ab94766818", "user": "monitor"}
2025-11-27T12:15:37.606Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "36338080-f953-4cce-948d-48ab94766818", "user": "monitor"}
2025-11-27T12:15:37.619Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "36338080-f953-4cce-948d-48ab94766818", "last-applied-secret": "9bbe2dfbce805028700b728051632816a6ff975eed0d04314c07f20a7d43b40e"}
2025-11-27T12:15:41.656Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "36338080-f953-4cce-948d-48ab94766818"}
2025-11-27T12:15:43.524Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "eed5a9c6-cae3-402f-915c-64eb59fef67c", "user": "operator"}
2025-11-27T12:15:43.539Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "eed5a9c6-cae3-402f-915c-64eb59fef67c", "user": "operator"}
2025-11-27T12:15:43.559Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "eed5a9c6-cae3-402f-915c-64eb59fef67c", "secret": "some-name-mysql-init", "user": "operator"}
2025-11-27T12:15:43.582Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "eed5a9c6-cae3-402f-915c-64eb59fef67c", "user": "operator"}
2025-11-27T12:15:43.597Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "eed5a9c6-cae3-402f-915c-64eb59fef67c", "user": "operator"}
2025-11-27T12:15:43.620Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "eed5a9c6-cae3-402f-915c-64eb59fef67c", "last-applied-secret": "1208092e9177d484c3966ea444ffa30b0c9f3a6bff10ed5691cb6bd6f56a2c11"}
2025-11-27T12:15:43.624Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "02c61b56-2121-45ca-be2c-c9d316c7adfb"}
2025-11-27T12:15:43.627Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "eed5a9c6-cae3-402f-915c-64eb59fef67c", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-27T12:16:19.615Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "e3434991-0275-4c09-8360-67d3fc83317d"}
2025-11-27T12:16:24.446Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "ad051251-2181-4eef-83dd-71885a14953d"}
2025-11-27T12:16:29.570Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "506adea5-192e-4a80-a8b6-e434dca5aefa"}
"pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "506adea5-192e-4a80-a8b6-e434dca5aefa"} 2025-11-27T12:16:34.845Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "9b14f85a-089f-4ba2-b235-84e60401076c"} 2025-11-27T12:16:40.196Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "c924e85c-28e1-4ee5-8884-bb565551ff29"} 2025-11-27T12:16:45.455Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1f536c4a-0886-42c4-a539-393fadd07b08"} 2025-11-27T12:16:51.216Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "fee0cb26-cffc-4965-9f8d-9dd2d1b33990"} 2025-11-27T12:16:56.092Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "930f6722-b688-434b-8b47-f1ce59e7272b"} 2025-11-27T12:17:01.482Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "ce9438ca-852d-4a4d-a24b-eb90b000aede"} 2025-11-27T12:17:06.370Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "fff493d9-3d1e-4284-a9e8-175519608329"} 2025-11-27T12:17:11.994Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "a4aef9f7-968b-4d6e-942c-fe1ab67c5bb6"} 2025-11-27T12:17:17.652Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "0f91a9d9-2a29-466b-9e61-436a8b9fa15b"} 2025-11-27T12:17:23.074Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "285b1676-09b8-4b6d-a8be-6db384956cfb"} 2025-11-27T12:17:28.468Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "b085caee-96b3-4740-8ce3-e08bcea339ba"} 2025-11-27T12:17:33.675Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "67c7ba3d-5b98-4481-be20-6e23a429f32c"} 2025-11-27T12:17:35.243Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "10a38ae2-49e1-44e2-b176-cf4d9e28ca39", "user": "root"} 2025-11-27T12:17:35.265Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "10a38ae2-49e1-44e2-b176-cf4d9e28ca39", "user": "root"} 2025-11-27T12:17:35.293Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "10a38ae2-49e1-44e2-b176-cf4d9e28ca39", "secret": "some-name-mysql-init", "user": "root"} 2025-11-27T12:17:37.768Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "10a38ae2-49e1-44e2-b176-cf4d9e28ca39"} 2025-11-27T12:17:37.792Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": 
"10a38ae2-49e1-44e2-b176-cf4d9e28ca39", "user": "root"} 2025-11-27T12:17:37.810Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "10a38ae2-49e1-44e2-b176-cf4d9e28ca39", "user": "root"} 2025-11-27T12:17:37.825Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "10a38ae2-49e1-44e2-b176-cf4d9e28ca39", "user": "monitor"} 2025-11-27T12:17:37.839Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "10a38ae2-49e1-44e2-b176-cf4d9e28ca39", "user": "monitor"} 2025-11-27T12:17:37.859Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "10a38ae2-49e1-44e2-b176-cf4d9e28ca39", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-27T12:17:37.876Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "10a38ae2-49e1-44e2-b176-cf4d9e28ca39", "user": "monitor"} 2025-11-27T12:17:37.896Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "10a38ae2-49e1-44e2-b176-cf4d9e28ca39", "user": "monitor"} 2025-11-27T12:17:38.180Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "10a38ae2-49e1-44e2-b176-cf4d9e28ca39", "user": "xtrabackup"} 2025-11-27T12:17:38.193Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "10a38ae2-49e1-44e2-b176-cf4d9e28ca39", "user": "xtrabackup"} 2025-11-27T12:17:38.214Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "10a38ae2-49e1-44e2-b176-cf4d9e28ca39", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-11-27T12:17:38.242Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "10a38ae2-49e1-44e2-b176-cf4d9e28ca39", "user": "xtrabackup"} 2025-11-27T12:17:38.254Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "10a38ae2-49e1-44e2-b176-cf4d9e28ca39", "user": "xtrabackup"} 2025-11-27T12:17:38.261Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "10a38ae2-49e1-44e2-b176-cf4d9e28ca39", "user": "proxyadmin"} 2025-11-27T12:17:38.279Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "10a38ae2-49e1-44e2-b176-cf4d9e28ca39", "user": "proxyadmin"} 2025-11-27T12:17:38.301Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "10a38ae2-49e1-44e2-b176-cf4d9e28ca39", "user": "proxyadmin"} 2025-11-27T12:17:38.301Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "10a38ae2-49e1-44e2-b176-cf4d9e28ca39", "last-applied-secret": "5fce66101834f62791efa535a848526f04443d42d0c476bae97722a9954ff302"} 2025-11-27T12:17:38.301Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": 
"10a38ae2-49e1-44e2-b176-cf4d9e28ca39", "last-applied-secret": "5fce66101834f62791efa535a848526f04443d42d0c476bae97722a9954ff302"} 2025-11-27T12:17:38.304Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "10a38ae2-49e1-44e2-b176-cf4d9e28ca39", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-27T12:17:38.418Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "10a38ae2-49e1-44e2-b176-cf4d9e28ca39", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-27T12:17:40.050Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "10a38ae2-49e1-44e2-b176-cf4d9e28ca39", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
2025-11-27T12:17:57.689Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "4463e4fd-9d9c-4250-bbc8-d5f0dca736a0", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-27T12:17:57.737Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "4463e4fd-9d9c-4250-bbc8-d5f0dca736a0", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"}
2025-11-27T12:17:57.782Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "4463e4fd-9d9c-4250-bbc8-d5f0dca736a0", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"}
2025-11-27T12:17:57.866Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "4463e4fd-9d9c-4250-bbc8-d5f0dca736a0", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
2025-11-27T12:17:58.022Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "4463e4fd-9d9c-4250-bbc8-d5f0dca736a0", "object": "some-name-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
2025-11-27T12:17:58.851Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "67616958-2070-4dea-b9c8-eb62602376e4", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"}
2025-11-27T12:19:29.427Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "4c4d260d-f7bc-418e-ab59-5efbfd5e2d8f", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-30642 on 34.118.224.10:53: no such host"}
2025-11-27T12:20:06.252Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "c2091f14-0ade-45a0-8b34-8aa4da96dc35", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-30642 on 34.118.224.10:53: no such host"}
2025-11-27T12:20:06.580Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "22eeb3c7-bd8a-4cfb-96de-3f2081e58946", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-30642 on 34.118.224.10:53: no such host"}
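The "reconcile replication error" entries above are DNS failures rather than MySQL failures: while the StatefulSet pods are being recreated after the restart, the headless service some-name-pxc briefly has no record for some-name-pxc-0/1, so the lookup against the cluster resolver (34.118.224.10:53) returns "no such host". These normally clear on their own once the pods are Ready again; one way to watch that converge (a sketch using the labels and service name from this log):

    kubectl -n users-30642 get pods -l app.kubernetes.io/component=pxc -o wide
    kubectl -n users-30642 get endpoints some-name-pxc
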
"failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-30642 on 34.118.224.10:53: no such host"} 2025-11-27T12:20:53.986Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1ab93a3e-15cd-40ed-8213-11125058dcc7", "user": "root"} 2025-11-27T12:20:54.008Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1ab93a3e-15cd-40ed-8213-11125058dcc7", "user": "root"} 2025-11-27T12:20:54.030Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1ab93a3e-15cd-40ed-8213-11125058dcc7", "secret": "some-name-mysql-init", "user": "root"} 2025-11-27T12:20:54.052Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1ab93a3e-15cd-40ed-8213-11125058dcc7", "user": "root"} 2025-11-27T12:20:54.069Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1ab93a3e-15cd-40ed-8213-11125058dcc7", "user": "root"} 2025-11-27T12:20:54.077Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1ab93a3e-15cd-40ed-8213-11125058dcc7", "user": "operator"} 2025-11-27T12:20:54.091Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1ab93a3e-15cd-40ed-8213-11125058dcc7", "user": "operator"} 2025-11-27T12:20:54.114Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1ab93a3e-15cd-40ed-8213-11125058dcc7", "secret": "some-name-mysql-init", "user": "operator"} 2025-11-27T12:20:54.144Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1ab93a3e-15cd-40ed-8213-11125058dcc7", "user": "operator"} 2025-11-27T12:20:54.159Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1ab93a3e-15cd-40ed-8213-11125058dcc7", "user": "operator"} 2025-11-27T12:20:54.166Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1ab93a3e-15cd-40ed-8213-11125058dcc7", "user": "monitor"} 2025-11-27T12:20:54.179Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1ab93a3e-15cd-40ed-8213-11125058dcc7", "user": "monitor"} 2025-11-27T12:20:54.202Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1ab93a3e-15cd-40ed-8213-11125058dcc7", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-27T12:20:54.222Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1ab93a3e-15cd-40ed-8213-11125058dcc7", "user": "monitor"} 2025-11-27T12:20:54.362Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1ab93a3e-15cd-40ed-8213-11125058dcc7", "user": "xtrabackup"} 2025-11-27T12:20:54.375Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": 
"1ab93a3e-15cd-40ed-8213-11125058dcc7", "user": "xtrabackup"} 2025-11-27T12:20:54.443Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1ab93a3e-15cd-40ed-8213-11125058dcc7", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-11-27T12:20:54.463Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1ab93a3e-15cd-40ed-8213-11125058dcc7", "user": "xtrabackup"} 2025-11-27T12:20:54.475Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1ab93a3e-15cd-40ed-8213-11125058dcc7", "user": "xtrabackup"} 2025-11-27T12:20:54.481Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1ab93a3e-15cd-40ed-8213-11125058dcc7", "user": "replication"} 2025-11-27T12:20:54.495Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1ab93a3e-15cd-40ed-8213-11125058dcc7", "user": "replication"} 2025-11-27T12:20:54.550Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1ab93a3e-15cd-40ed-8213-11125058dcc7", "secret": "some-name-mysql-init", "user": "replication"} 2025-11-27T12:20:54.568Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1ab93a3e-15cd-40ed-8213-11125058dcc7", "user": "replication"} 2025-11-27T12:20:54.579Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1ab93a3e-15cd-40ed-8213-11125058dcc7", "user": "replication"} 2025-11-27T12:20:54.579Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1ab93a3e-15cd-40ed-8213-11125058dcc7", "last-applied-secret": "d4dc62bc846d16b0766669d4af98c46dfe504b903b858c3de5a69955c46a659b"} 2025-11-27T12:20:54.581Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "1ab93a3e-15cd-40ed-8213-11125058dcc7", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-27T12:22:38.904Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "6d27d082-3add-44f0-a856-8692c1c5fc55", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-30642 on 34.118.224.10:53: no such host"} 2025-11-27T12:23:26.923Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "c7fe1648-6193-485e-90e1-079fabd01a71", "user": "monitor"} 2025-11-27T12:23:27.634Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "c7fe1648-6193-485e-90e1-079fabd01a71", "user": "monitor"} 2025-11-27T12:23:29.574Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "fa144260-523d-4bf0-9104-ea7acbbd46c4", "user": "monitor"} 2025-11-27T12:23:29.586Z INFO Password updated {"controller": "pxc-controller", 
"namespace": "users-30642", "name": "some-name", "reconcileID": "fa144260-523d-4bf0-9104-ea7acbbd46c4", "user": "monitor"} 2025-11-27T12:23:29.610Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "fa144260-523d-4bf0-9104-ea7acbbd46c4", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-27T12:23:29.629Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "fa144260-523d-4bf0-9104-ea7acbbd46c4", "user": "monitor"} 2025-11-27T12:23:31.927Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "ca23141d-584d-465e-825c-0598dbb07c8d", "user": "monitor"} 2025-11-27T12:23:37.552Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "5d84aba5-b8fb-44f8-86f0-62d4360de789", "user": "monitor"} 2025-11-27T12:23:43.520Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "dd859809-dd53-4117-87a4-64e55e46dbea", "user": "monitor"} 2025-11-27T12:23:49.392Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "18a2916d-537b-4084-a753-577f2e40ae35", "user": "monitor"} 2025-11-27T12:23:55.547Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-30642", "name": "some-name", "reconcileID": "4ac50f8f-8a76-4c59-b4fd-39be4d276a24", "user": "monitor"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:856 -  }, -  { -  }, -  { -  }, -  }, +  }, -  "1208092e9177d484c3966ea444ffa30b0c9f3a6bff10ed5691cb6bd6f56a2c11", +  "1208092e9177d484c3966ea444ffa30b0c9f3a6bff10ed5691cb6bd6f56a2c11", -  "38e3fcecd16efc6d079552900b5a55bf0fe89d5ae9298e755e339635d6711157", -  "5fce66101834f62791efa535a848526f04443d42d0c476bae97722a9954ff302", +  "5fce66101834f62791efa535a848526f04443d42d0c476bae97722a9954ff302", -  "7e7c15af99cd23e9bb798619a267f5284f598a28f2fa29f0e9f3dca5edac8552", +  "7e7c15af99cd23e9bb798619a267f5284f598a28f2fa29f0e9f3dca5edac8552", -  "9bbe2dfbce805028700b728051632816a6ff975eed0d04314c07f20a7d43b40e", +  "9bbe2dfbce805028700b728051632816a6ff975eed0d04314c07f20a7d43b40e", -  "a129018960796adccb0f3236d814f7f227be0dd2c53ee1ec7eaf54ac51dbbd14", -  Annotations: map[string]string{ +  Annotations: map[string]string{ +  APIVersion: "", -  APIVersion: "apps/v1", -  APIVersion: "apps/v1", -  APIVersion: "v1", -  Args: []string{"logrotate"}, +  AvailableReplicas: 0, -  AvailableReplicas: 2, -  AvailableReplicas: 3, -  CollisionCount: &0, +  CollisionCount: nil, +  CreationTimestamp: v1.Time{}, -  CreationTimestamp: v1.Time{Time: s"2025-11-27 11:58:59 +0000 UTC"}, +  CurrentReplicas: 0, -  CurrentReplicas: 2, -  CurrentReplicas: 3, +  CurrentRevision: "", -  CurrentRevision: "some-name-proxysql-556b487f65", -  CurrentRevision: "some-name-proxysql-5bd4ccc476", -  CurrentRevision: "some-name-proxysql-67ccd75849", -  CurrentRevision: "some-name-proxysql-6f78649fb4", -  CurrentRevision: "some-name-proxysql-769b66cc5c", -  CurrentRevision: "some-name-proxysql-7865f9ff97", -  
CurrentRevision: "some-name-pxc-556b487889", -  CurrentRevision: "some-name-pxc-5769c54847", -  CurrentRevision: "some-name-pxc-78dbd6f499", -  CurrentRevision: "some-name-pxc-fbf655748", -  "d4dc62bc846d16b0766669d4af98c46dfe504b903b858c3de5a69955c46a659b", +  "d4dc62bc846d16b0766669d4af98c46dfe504b903b858c3de5a69955c46a659b", -  DefaultMode: &420, -  DefaultMode: &420, +  DefaultMode: nil, +  DefaultMode: nil, +  DeprecatedServiceAccount: "", -  DeprecatedServiceAccount: "default", +  DNSPolicy: "", -  DNSPolicy: "ClusterFirst", -  EnvFrom: []v1.EnvFromSource{{SecretRef: s"&SecretEnvSource{LocalObjectRefe"...}}, -  Env: []v1.EnvVar{ -  FieldsType: "FieldsV1", -  FieldsType: "FieldsV1", -  FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`..., -  FieldsV1: s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`..., +  Generation: 0, -  Generation: 1, -  Generation: 2, -  Generation: 3, -  Generation: 4, -  Generation: 5, -  Generation: 6, -  Generation: 7, -  Generation: 8, -  Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", -  Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", -  ImagePullPolicy: "Always", +  "last-applied-secret": "38e3fcecd16efc6d079552900b5a55bf0fe89d5ae9298e755e339635d6711157", +  "last-applied-secret": "a129018960796adccb0f3236d814f7f227be0dd2c53ee1ec7eaf54ac51dbbd14", +  ManagedFields: nil, -  ManagedFields: []v1.ManagedFieldsEntry{ -  Manager: "kube-controller-manager", -  Manager: "percona-xtradb-cluster-operator", -  {Name: "IS_LOGCOLLECTOR", Value: "yes"}, -  {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"}, -  Name: "logrotate", -  Name: "logs", -  {Name: "MONITOR_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, -  {Name: "POD_NAMESPASE", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, -  {Name: "POD_NAME", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, -  {Name: "SERVICE_TYPE", Value: "mysql"}, +  ObservedGeneration: 0, -  ObservedGeneration: 1, -  ObservedGeneration: 2, -  ObservedGeneration: 3, -  ObservedGeneration: 4, -  ObservedGeneration: 5, -  ObservedGeneration: 6, -  ObservedGeneration: 7, -  ObservedGeneration: 8, -  Operation: "Update", -  Operation: "Update", -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMTIwODA5MmU5MTc3ZDQ4NGMzOTY2ZWE0NDRmZmEzMGIwYzlmM2E2YmZmMTBlZDU2OTFjYjZiZDZmNTZhMmMxMSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMTIwODA5MmU5MTc3ZDQ4NGMzOTY2ZWE0NDRmZmEzMGIwYzlmM2E2YmZmMTBlZDU2OTFjYjZiZDZmNTZhMmMxMSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMzhlM2ZjZWNkMTZlZmM2ZDA3OTU1MjkwMGI1YTU1YmYwZmU4OWQ1YWU5Mjk4ZTc1NWUzMzk2MzVkNjcxMTE1NyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiN2U3YzE1YWY5OWNkMjNlOWJiNzk4NjE5YTI2N2Y1Mjg0ZjU5OGEyOGYyZmEyOWYwZTlmM2RjYTVlZGFjODU1MiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiN2U3YzE1YWY5OWNkMjNlOWJiNzk4NjE5YTI2N2Y1Mjg0ZjU5OGEyOGYyZmEyOWYwZTlmM2RjYTVlZGFjODU1MiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNWZjZTY2MTAxODM0ZjYyNzkxZWZhNTM1YTg0ODUyNmYwNDQ0M2Q0MmQwYzQ3NmJhZTk3NzIyYTk5NTRmZjMwMiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOWJiZTJkZmJjZTgwNTAyODcwMGI3MjgwNTE2MzI4MTZhNmZmOTc1ZWVkMGQwNDMxNGMwN2YyMGE3ZDQzYjQwZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOWJiZTJkZmJjZTgwNTAyODcwMGI3MjgwNTE2MzI4MTZhNmZmOTc1ZWVkMGQwNDMxNGMwN2YyMGE3ZDQzYjQwZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZDRkYzYyYmM4NDZkMTZiMDc2NjY2OWQ0YWY5OGM0NmRmZTUwNGI5MDNiODU4YzNkZTVhNjk5NTVjNDZhNjU5YiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZDRkYzYyYmM4NDZkMTZiMDc2NjY2OWQ0YWY5OGM0NmRmZTUwNGI5MDNiODU4YzNkZTVhNjk5NTVjNDZhNjU5YiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNWZjZTY2MTAxODM0ZjYyNzkxZWZhNTM1YTg0ODUyNmYwNDQ0M2Q0MmQwYzQ3NmJhZTk3NzIyYTk5NTRmZjMwMiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNWZjZTY2MTAxODM0ZjYyNzkxZWZhNTM1YTg0ODUyNmYwNDQ0M2Q0MmQwYzQ3NmJhZTk3NzIyYTk5NTRmZjMwMiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNWZjZTY2MTAxODM0ZjYyNzkxZWZhNTM1YTg0ODUyNmYwNDQ0M2Q0MmQwYzQ3NmJhZTk3NzIyYTk5NTRmZjMwMiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMTkzLTFlYjM3YjIwIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJsb2dzIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1sb2djb2xsZWN0b3IiLCJlbnZGcm9tIjpbeyJzZWNyZXRSZWYiOnsibmFtZSI6InNvbWUtbmFtZS1sb2ctY29sbGVjdG9yIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IkxPR19EQVRBX0RJUiIsInZhbHVlIjoiL3Zhci9saWIvbXlzcWwifSx7Im5hbWUiOiJQT0RfTkFNRVNQQVNFIiwidmFsdWVGcm9tIjp7ImZpZWxkUmVmIjp7ImZpZWxkUGF0aCI6Im1ldGFkYXRhLm5hbWVzcGFjZSJ9fX0seyJuYW1lIjoiUE9EX05BTUUiLCJ2YWx1ZUZyb20iOnsiZmllbGRSZWYiOnsiZmllbGRQYXRoIjoibWV0YWRhdGEubmFtZSJ9fX1dLCJyZXNvdXJjZXMiOnt9LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJkYXRhZGlyIiwibW91bnRQYXRoIjoiL3Zhci9saWIvbXlzcWwifV0sImltYWdlUHVsbFBvbGljeSI6IkFsd2F5cyJ9LHsibmFtZSI6ImxvZ3JvdGF0ZSIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOm1haW4tbG9nY29sbGVjdG9yIiwiYXJncyI6WyJsb2dyb3RhdGUiXSwiZW52IjpbeyJuYW1lIjoiU0VSVklDRV9UWVBFIiwidmFsdWUiOiJteXNxbCJ9LHsibmFtZSI6Ik1PTklUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJtb25pdG9yIn19fV0sInJlc291cmNlcyI6e30sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImRhdGFkaXIiLCJtb3VudF
BhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHhjIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1weGM4LjAiLCJjb21tYW5kIjpbIi92YXIvbGliL215c3FsL3B4Yy1lbnRyeXBvaW50LnNoIl0sImFyZ3MiOlsibXlzcWxkIl0sInBvcnRzIjpbeyJuYW1lIjoibXlzcWwiLCJjb250YWluZXJQb3J0IjozMzA2fSx7Im5hbWUiOiJzc3QiLCJjb250YWluZXJQb3J0Ijo0NDQ0fSx7Im5hbWUiOiJ3cml0ZS1zZXQiLCJjb250YWluZXJQb3J0Ijo0NTY3fSx7Im5hbWUiOiJpc3QiLCJjb250YWluZXJQb3J0Ijo0NTY4fSx7Im5hbWUiOiJteXNxbC1hZG1pbiIsImNvbnRhaW5lclBvcnQiOjMzMDYyfSx7Im5h"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNWZjZTY2MTAxODM0ZjYyNzkxZWZhNTM1YTg0ODUyNmYwNDQ0M2Q0MmQwYzQ3NmJhZTk3NzIyYTk5NTRmZjMwMiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMTkzLTFlYjM3YjIwIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXB4YzguMCIsImNvbW1hbmQiOlsiL3Zhci9saWIvbXlzcWwvcHhjLWVudHJ5cG9pbnQuc2giXSwiYXJncyI6WyJteXNxbGQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InNzdCIsImNvbnRhaW5lclBvcnQiOjQ0NDR9LHsibmFtZSI6IndyaXRlLXNldCIsImNvbnRhaW5lclBvcnQiOjQ1Njd9LHsibmFtZSI6ImlzdCIsImNvbnRhaW5lclBvcnQiOjQ1Njh9LHsibmFtZSI6Im15c3FsLWFkbWluIiwiY29udGFpbmVyUG9ydCI6MzMwNjJ9LHsibmFtZSI6Im15c3FseCIsImN
vbnRhaW5lclBvcnQiOjMzMDYwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXB4YyIsIm9wdGlvbmFsIjp0cnVlfX1dLCJlbnYiOlt7Im5hbWUiOiJQWENfU0VSVklDRSIsInZhbHVlIjoic29tZS1uYW1lLXB4Yy11bnJlYWR5In0seyJuYW1lIjoiTU9OSVRPUl9IT1NUIiwidmFsdWUiOiIlIn0seyJuYW1lIjoiTVlTUUxfUk9PVF9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6InJvb3QifX19LHsibmFtZSI6IlhUUkFCQUNLVVBfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJ4dHJhYmFja3VwIn19fSx7Im5hbWUiOiJNT05JVE9SX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoibW9uaXRvciJ9fX0seyJuYW1lIjoiQ0xVU1RFUl9IQVNIIiwidmFsdWUiOiIxNTk0NDM3In0seyJuYW1lIjoiT1BFUkFUT1JfQURNSU5fUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiTElWRU5FU1NfQ0hFQ0tfVElNRU9VVCIsInZhbHVlIjoiNSJ9LHsibmFtZSI6IlJFQURJTkVTU19DSEVDS19USU1FT1VUIiwidmFsdWUiOiIxNSJ9LHsibmFtZSI6IkRFRkFVTFRfQVVUSEVOVElDQVRJT05fUExVR0lOIiwidmFsdWUiOiJjYWNoaW5nX3NoYTJfcGFzc3dvcmQifSx7Im5hbWUiOiJNWVNR"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOWJiZTJkZmJjZTgwNTAyODcwMGI3MjgwNTE2MzI4MTZhNmZmOTc1ZWVkMGQwNDMxNGMwN2YyMGE3ZDQzYjQwZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOWJiZTJkZmJjZTgwNTAyODcwMGI3MjgwNTE2MzI4MTZhNmZmOTc1ZWVkMGQwNDMxNGMwN2YyMGE3ZDQzYjQwZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYTEyOTAxODk2MDc5NmFkY2NiMGYzMjM2ZDgxNGY3ZjIyN2JlMGRkMmM1M2VlMWVjN2VhZjU0YWM1MWRiYmQxNCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYTEyOTAxODk2MDc5NmFkY2NiMGYzMjM2ZDgxNGY3ZjIyN2JlMGRkMmM1M2VlMWVjN2VhZjU0YWM1MWRiYmQxNCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZDRkYzYyYmM4NDZkMTZiMDc2NjY2OWQ0YWY5OGM0NmRmZTUwNGI5MDNiODU4YzNkZTVhNjk5NTVjNDZhNjU5YiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNv"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMzhlM2ZjZWNkMTZlZmM2ZDA3OTU1MjkwMGI1YTU1YmYwZmU4OWQ1YWU5Mjk4ZTc1NWUzMzk2MzVkNjcxMTE1NyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMw"..., +  PeriodSeconds: 0, -  PeriodSeconds: 10, +  PersistentVolumeClaimRetentionPolicy: nil, -  PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", +  Phase: "", -  Phase: "Pending", +  PodManagementPolicy: "", -  PodManagementPolicy: "OrderedReady", +  Protocol: "", -  Protocol: "TCP", +  ReadyReplicas: 0, -  ReadyReplicas: 2, -  ReadyReplicas: 3, +  Replicas: 0, -  Replicas: 2, -  Replicas: &2, +  Replicas: &2, -  Replicas: 3, -  Replicas: &3, +  Replicas: &3, +  ResourceVersion: "", -  ResourceVersion: "1764244959018575000", -  ResourceVersion: "1764245002916143011", -  ResourceVersion: "1764245086197583011", -  ResourceVersion: "1764245126108927011", -  ResourceVersion: "1764245140082719011", -  ResourceVersion: "1764245288573887000", -  ResourceVersion: "1764245329479759011", -  ResourceVersion: "1764245431627439011", -  ResourceVersion: "1764245481710351011", -  ResourceVersion: "1764245730770383000", -  ResourceVersion: "1764245768143311011", -  ResourceVersion: "1764245873996703000", -  ResourceVersion: "1764246049906047000", +  RestartPolicy: "", -  RestartPolicy: "Always", -  RevisionHistoryLimit: &10, +  RevisionHistoryLimit: nil, +  SchedulerName: "", -  SchedulerName: "default-scheduler", -  Subresource: "status", +  TerminationMessagePath: "", -  TerminationMessagePath: "/dev/termination-log", +  TerminationMessagePolicy: "", -  TerminationMessagePolicy: "File", -  Time: s"2025-11-27 11:58:59 +0000 UTC", -  Time: s"2025-11-27 12:02:39 +0000 UTC", -  Time: s"2025-11-27 12:03:22 +0000 UTC", -  Time: s"2025-11-27 12:04:26 +0000 UTC", -  Time: s"2025-11-27 12:04:46 +0000 UTC", -  Time: s"2025-11-27 12:04:52 +0000 UTC", -  Time: s"2025-11-27 12:05:26 +0000 UTC", -  Time: s"2025-11-27 12:05:39 +0000 UTC", -  Time: s"2025-11-27 12:05:40 +0000 UTC", -  Time: s"2025-11-27 12:05:46 +0000 UTC", -  Time: s"2025-11-27 12:08:08 +0000 UTC", -  Time: s"2025-11-27 12:08:20 +0000 UTC", -  Time: s"2025-11-27 12:08:49 +0000 UTC", -  Time: s"2025-11-27 12:10:09 +0000 UTC", -  Time: s"2025-11-27 12:10:31 +0000 UTC", -  Time: s"2025-11-27 12:10:57 +0000 UTC", -  Time: s"2025-11-27 12:11:21 +0000 UTC", -  Time: s"2025-11-27 12:15:30 +0000 UTC", -  Time: s"2025-11-27 12:15:43 +0000 UTC", -  Time: s"2025-11-27 12:16:08 +0000 UTC", -  Time: s"2025-11-27 12:17:38 +0000 UTC", -  Time: s"2025-11-27 12:17:53 +0000 UTC", -  Time: s"2025-11-27 12:17:57 +0000 UTC", -  Time: s"2025-11-27 12:20:49 +0000 UTC", -  
-  TopologySpreadConstraints: nil,
+  TopologySpreadConstraints: []v1.TopologySpreadConstraint{},
+  UID: "",
-  UID: "56880d10-afa1-4f03-8f1c-663edc4fbca9",
-  UID: "ab3d070d-8580-4465-8209-3028366740e7",
+  UpdatedReplicas: 0,
-  UpdatedReplicas: 1,
-  UpdatedReplicas: 2,
-  UpdatedReplicas: 3,
+  UpdateRevision: "",
-  UpdateRevision: "some-name-proxysql-556b487f65",
-  UpdateRevision: "some-name-proxysql-5bd4ccc476",
-  UpdateRevision: "some-name-proxysql-67ccd75849",
-  UpdateRevision: "some-name-proxysql-6f78649fb4",
-  UpdateRevision: "some-name-proxysql-769b66cc5c",
-  UpdateRevision: "some-name-proxysql-7865f9ff97",
-  UpdateRevision: "some-name-pxc-556b487889",
-  UpdateRevision: "some-name-pxc-5769c54847",
-  UpdateRevision: "some-name-pxc-657844d98c",
-  UpdateRevision: "some-name-pxc-78dbd6f499",
-  UpdateRevision: "some-name-pxc-fbf655748",
-  VolumeMode: &"Filesystem",
+  VolumeMode: nil,
-  VolumeMounts: []v1.VolumeMount{{Name: "datadir", MountPath: "/var/lib/mysql"}},
  }
  },
  },
  {
  },
  },
  {
  },
  }, ""),
  },
  {
  },
  },
  },
  ... // 16 identical fields
  ... // 16 identical fields
  ... // 22 identical fields
  ... // 2 identical fields
  ... // 2 identical fields
  ... // 2 identical fields
  ... // 3 identical elements
  ... // 3 identical fields
  ... // 3 identical fields
  ... // 3 identical fields
  ... // 4 identical fields
  ... // 5 identical elements
  ... // 5 identical fields
  ... // 5 identical fields
  ... // 5 identical fields
  ... // 6 identical fields
  ... // 6 identical fields
  ... // 7 identical fields
  ... // 8 identical fields
  ... // 9 identical fields
  ... // 9 identical fields
  AccessModes: nil,
  ActiveDeadlineSeconds: nil,
  Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},
  Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},
  Annotations: map[string]string{
  Args: {"mysqld"},
  Args: {"proxysql", "-f", "-c", "/etc/proxysql/proxysql.cnf", ...},
  AutomountServiceAccountToken: nil,
  AWSElasticBlockStore: nil,
  AzureFile: nil,
  Capacity: nil,
  Conditions: nil,
  ConfigMapKeyRef: nil,
  ConfigMap: &v1.ConfigMapVolumeSource{
  ContainerPort: 3306,
  ContainerPort: 33060,
  ContainerPort: 33062,
  ContainerPort: 4444,
  ContainerPort: 4567,
  ContainerPort: 4568,
  ContainerPort: 6032,
  ContainerPort: 6070,
  Containers: []v1.Container{
  DataSource: nil,
  DataSourceRef: nil,
  DeletionGracePeriodSeconds: nil,
  DeletionGracePeriodSeconds: nil,
  DeletionTimestamp: nil,
  EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-proxysql"}, Optional: &true}}},
  EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-pxc"}, Optional: &true}}},
  EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-log-collector"}, Optional: &true}}},
  Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "OPERATOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "proxyadmin"}}}, ...},
  Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, ...},
  Env: []v1.EnvVar{
  EphemeralContainers: nil,
  FailureThreshold: 3,
  FC: nil,
  FieldPath: "metadata.name",
  FieldPath: "metadata.namespace",
  FieldRef: &v1.ObjectFieldSelector{
  Finalizers: nil,
  Finalizers: nil,
  GitRepo: nil,
  HostAliases: nil,
  HostIP: "",
  HostPort: 0,
  ImagePullPolicy: "Always",
  InitContainers: []v1.Container{
  InitialDelaySeconds: 300,
  ISCSI: nil,
  Items: nil,
  Items: nil,
  "kubectl.kubernetes.io/default-container": "proxysql",
  "kubectl.kubernetes.io/default-container": "pxc",
  Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},
  Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},
  Labels: nil,
  "last-applied-secret": strings.Join({
  Lifecycle: nil,
  LivenessProbe: &v1.Probe{
  LocalObjectReference: {Name: "auto-some-name-pxc"},
  LocalObjectReference: {Name: "some-name-pxc"},
  ManagedFields: nil,
  MinReadySeconds: 0,
  Name: "auto-config",
  {Name: "bin", VolumeSource: {EmptyDir: &{}}},
  {Name: "CLUSTER_HASH", Value: "1594437"},
  Name: "config",
  Name: "ist",
  {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"},
  {Name: "MONITOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "monitor"}}},
  Name: "mysql",
  Name: "mysql-admin",
  Name: "mysql-init-file",
  Name: "mysql-users-secret-file",
  Name: "mysqlx",
  {Name: "OPERATOR_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}},
  Name: "POD_NAME",
  Name: "POD_NAMESPASE",
  Name: "proxyadm",
  Namespace: "users-30642",
  Name: "ssl",
  Name: "ssl-internal",
  Name: "sst",
  Name: "stats",
  {Name: "tmp", VolumeSource: {EmptyDir: &{}}},
  Name: "vault-keyring-secret",
  Name: "write-set",
  {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}},
  NFS: nil,
  NodeName: "",
  NodeSelector: nil,
  ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "last-applied-secret": "38e3fcecd16efc6d079552900b5a55bf0fe89d5ae9298e755e339635d6711157", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}},
  ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}},
  ObjectMeta: {Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "pxc", "last-applied-secret": "5fce66101834f62791efa535a848526f04443d42d0c476bae97722a9954ff302", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}},
  ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},
  ObjectMeta: {Name: "proxydata", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},
  ObjectMeta: v1.ObjectMeta{
  ObjectMeta: v1.ObjectMeta{
  Optional: &false,
  Optional: &true,
  Optional: &true,
  Ordinals: nil,
  OS: nil,
  Overhead: nil,
  OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name", UID: "1d400763-86c3-4c24-b0b4-08b05469f9a2", ...}},
  OwnerReferences: nil,
  "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e",
  "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e",
  "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a",
  "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a",
  "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc",
  "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc",
  Ports: nil,
  Ports: []v1.ContainerPort{
  PreemptionPolicy: nil,
  ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}},
  Quobyte: nil,
  ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...},
  Replicas: &2,
  Replicas: &3,
  ResizePolicy: nil,
  ResourceFieldRef: nil,
  Resources: {},
  Resources: {Limits: {s"cpu": {i: {...}, s: "1", Format: "DecimalSI"}, s"memory": {i: {...}, s: "2G", Format: "DecimalSI"}}, Requests: {s"cpu": {i: {...}, s: "100m", Format: "DecimalSI"}, s"memory": {i: {...}, Format: "DecimalSI"}}},
  SecretName: "internal-some-name",
  SecretName: "some-name-mysql-init",
  SecretName: "some-name-ssl",
  SecretName: "some-name-ssl-internal",
  SecretName: "some-name-vault",
  Secret: &v1.SecretVolumeSource{
  SecurityContext: nil,
  Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},
  Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", 
"app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    SelfLink: "",    ServiceAccountName: "default",    ServiceName: "some-name-proxysql-unready",    ServiceName: "some-name-pxc",    SetHostnameAsFQDN: nil,    Spec: v1.PersistentVolumeClaimSpec{    Spec: v1.PodSpec{    Spec: v1.StatefulSetSpec{    StartupProbe: nil,    Status: v1.PersistentVolumeClaimStatus{    Status: v1.StatefulSetStatus{    StorageClassName: nil,    Subdomain: "",    SuccessThreshold: 1,    Template: v1.PodTemplateSpec{    TerminationGracePeriodSeconds: &30,    TerminationGracePeriodSeconds: &600,    TerminationGracePeriodSeconds: nil,    TimeoutSeconds: 5,    Tolerations: nil,    TypeMeta: {},    TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"},    UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}},   &v1.StatefulSet{    Value: "",    ValueFrom: &v1.EnvVarSource{    VolumeAttributesClassName: nil,    VolumeClaimTemplates: []v1.PersistentVolumeClaim{    VolumeDevices: nil,    VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...},    VolumeName: "",    VolumeSource: v1.VolumeSource{    Volumes: []v1.Volume{    VsphereVolume: nil,    WorkingDir: "", + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-30642 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.8kxuRRzfJG ++ mktemp + local LAST_ERR=/tmp/tmp.ew435bfgAL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8kxuRRzfJG perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-30642 namespace + cat /tmp/tmp.ew435bfgAL + rm /tmp/tmp.8kxuRRzfJG /tmp/tmp.ew435bfgAL + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.taKoko5MVi ++ mktemp + local LAST_ERR=/tmp/tmp.ry30nD3761 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.taKoko5MVi No resources found + cat /tmp/tmp.ry30nD3761 + rm /tmp/tmp.taKoko5MVi /tmp/tmp.ry30nD3761 + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.kLJuvclUtL ++ mktemp + local LAST_ERR=/tmp/tmp.3mHdsEzrPv + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.kLJuvclUtL No resources found + cat /tmp/tmp.3mHdsEzrPv + rm /tmp/tmp.kLJuvclUtL /tmp/tmp.3mHdsEzrPv + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.RtaOZhgjNY ++ mktemp + local LAST_ERR=/tmp/tmp.siw7NOh2bt + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RtaOZhgjNY 
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.siw7NOh2bt + rm /tmp/tmp.RtaOZhgjNY /tmp/tmp.siw7NOh2bt + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-30642 + rm -rf /tmp/tmp.f8TwUdsyjw + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.87BN6mzxgv + local LAST_OUT=/tmp/tmp.bhLvTk6IZp + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.3yvRNP4Xnh + local exit_status=0 + local LAST_ERR=/tmp/tmp.4fDs81CKAC + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-30642 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator
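Editor's note: nearly every command in the cleanup trace above runs through the test suite's kubectl_bin wrapper, whose behavior is visible in the trace itself: stdout and stderr are captured into mktemp files, the kubectl call is retried up to three times (seq 0 2), and the captured streams are replayed and removed before the exit status is returned. Below is a minimal Bash sketch of that pattern reconstructed from the trace; the real helper lives in the e2e-tests functions library and may differ, and the sleep between failed attempts is an assumption, not something the trace shows.

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0 i
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do            # up to three attempts
            set +e                         # tolerate a failing attempt
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ $exit_status != 0 ]; then
                sleep 1                    # assumption: brief pause before retrying
            else
                break                      # success: stop retrying
            fi
        done
        cat "$LAST_OUT"                    # replay captured stdout
        cat "$LAST_ERR" >&2                # replay captured stderr
        rm "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }

Used as in the trace (for example, kubectl_bin delete pxc --all --all-namespaces), a transient API-server error fails a single attempt rather than the whole run; only three consecutive failures propagate a non-zero status to the caller.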