Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/logs/users-8-0.log Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 + create_infra users-9970 + local ns=users-9970 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-9116 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.mdRTpCTu5x ++ mktemp + local LAST_ERR=/tmp/tmp.54b271jev6 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mdRTpCTu5x perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-9116 namespace + cat /tmp/tmp.54b271jev6 + rm /tmp/tmp.mdRTpCTu5x /tmp/tmp.54b271jev6 + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.12MYt3Qqgn ++ mktemp + local LAST_ERR=/tmp/tmp.zWvS6PrA61 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.12MYt3Qqgn No resources found + cat /tmp/tmp.zWvS6PrA61 + rm /tmp/tmp.12MYt3Qqgn /tmp/tmp.zWvS6PrA61 + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.5M41aLNCZT ++ mktemp + local LAST_ERR=/tmp/tmp.1HvUQ6mW8c + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5M41aLNCZT No resources found + cat /tmp/tmp.1HvUQ6mW8c + rm /tmp/tmp.5M41aLNCZT /tmp/tmp.1HvUQ6mW8c + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, 
but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp ++ mktemp + xargs kubectl delete ns + local LAST_OUT=/tmp/tmp.CltfJbZRxZ + local LAST_OUT=/tmp/tmp.mGOFOKghGq ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.VKlW01mNn2 + local exit_status=0 + local LAST_ERR=/tmp/tmp.btO6O4GQWK + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.CltfJbZRxZ + cat /tmp/tmp.VKlW01mNn2 + rm /tmp/tmp.CltfJbZRxZ /tmp/tmp.VKlW01mNn2 + return 0 namespace "users-9116" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mGOFOKghGq namespace "pxc-operator" deleted + cat /tmp/tmp.btO6O4GQWK + rm /tmp/tmp.mGOFOKghGq /tmp/tmp.btO6O4GQWK + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.5zqVnJh5SO ++ mktemp + local LAST_ERR=/tmp/tmp.ITmChj9SuX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5zqVnJh5SO namespace/pxc-operator created + cat /tmp/tmp.ITmChj9SuX + rm /tmp/tmp.5zqVnJh5SO /tmp/tmp.ITmChj9SuX + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.cF6KY1Pr8N +++ mktemp ++ local LAST_ERR=/tmp/tmp.StpM2Seghq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cF6KY1Pr8N ++ cat /tmp/tmp.StpM2Seghq ++ rm /tmp/tmp.cF6KY1Pr8N /tmp/tmp.StpM2Seghq ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2154-0538614f-6-cluster2 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.YeGPHgcjQC ++ mktemp + local LAST_ERR=/tmp/tmp.1hTqsjbhHF + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2154-0538614f-6-cluster2 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.YeGPHgcjQC Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2154-0538614f-6-cluster2" modified. 
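Every kubectl call in this trace goes through the suite's kubectl_bin wrapper, which is why each command is surrounded by mktemp, seq 0 2, cat and rm steps. A minimal sketch of that retry wrapper, reconstructed only from what the trace itself shows (the names kubectl_bin, LAST_OUT, LAST_ERR and exit_status appear in the trace; the pause between attempts is an assumption, the trace only ever shows "sleep 0"):

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do                      # up to three attempts, as seen in the trace
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ "$exit_status" != 0 ]; then
            sleep 0                              # the trace shows a zero-length pause before retrying
        else
            break
        fi
    done
    cat "$LAST_OUT"                              # captured stdout is echoed back into the log
    cat "$LAST_ERR"                              # so is stderr, e.g. "No resources found"
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}

Callers that tolerate failure follow the wrapper with a bare ":" (visible later as "+ return 1 + :"), which is why a non-zero return, such as deleting a namespace that is already gone, does not abort the test.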
+ cat /tmp/tmp.1hTqsjbhHF + rm /tmp/tmp.YeGPHgcjQC /tmp/tmp.1hTqsjbhHF + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.UiAL0LLmF0 ++ mktemp + local LAST_ERR=/tmp/tmp.E7dSHGhTgF + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.UiAL0LLmF0 customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.E7dSHGhTgF + rm /tmp/tmp.UiAL0LLmF0 /tmp/tmp.E7dSHGhTgF + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/deploy/cw-rbac.yaml + kubectl_bin apply -f - + sed -e 's^namespace: .*^namespace: pxc-operator^' ++ mktemp + local LAST_OUT=/tmp/tmp.hwBSN3WIsC ++ mktemp + local LAST_ERR=/tmp/tmp.TblZrrbAZM + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hwBSN3WIsC clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.TblZrrbAZM + rm /tmp/tmp.hwBSN3WIsC /tmp/tmp.TblZrrbAZM + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/deploy/cw-operator.yaml + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2154-0538614f^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.griAQDwE9o ++ mktemp + local LAST_ERR=/tmp/tmp.ox9KfF9x59 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.griAQDwE9o deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.ox9KfF9x59 + rm /tmp/tmp.griAQDwE9o /tmp/tmp.ox9KfF9x59 + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.AAnYudbCH8 ++ mktemp + local LAST_ERR=/tmp/tmp.PwwUpK7nsg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.AAnYudbCH8 pod/percona-xtradb-cluster-operator-c799c8d46-lhr6j condition met + cat /tmp/tmp.PwwUpK7nsg + rm /tmp/tmp.AAnYudbCH8 /tmp/tmp.PwwUpK7nsg + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.p4vV7N3iso +++ mktemp ++ local LAST_ERR=/tmp/tmp.vGUSTpx7wf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.p4vV7N3iso ++ cat /tmp/tmp.vGUSTpx7wf ++ rm /tmp/tmp.p4vV7N3iso /tmp/tmp.vGUSTpx7wf ++ return 0 + wait_pod percona-xtradb-cluster-operator-c799c8d46-lhr6j 480 pxc-operator + local pod=percona-xtradb-cluster-operator-c799c8d46-lhr6j + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-c799c8d46-lhr6j ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-c799c8d46-lhr6j condition met waiting for pod/percona-xtradb-cluster-operator-c799c8d46-lhr6j to become Ready.Ok + sleep 3 + create_namespace users-9970 + local namespace=users-9970 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + '[' -n '' ']' + desc 
'cleaned up old namespaces users-9970' + xargs kubectl delete ns + set +o xtrace + awk '{print$1}' ----------------------------------------------------------------------------------- cleaned up old namespaces users-9970 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-9970 ++ mktemp ++ mktemp + egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + local LAST_OUT=/tmp/tmp.fXzzqiS8zm + local LAST_OUT=/tmp/tmp.wFoE4c2zk1 ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.3qm7jV7JZU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + local LAST_ERR=/tmp/tmp.nySH6U620N + kubectl delete namespace users-9970 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-9970 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.wFoE4c2zk1 + cat /tmp/tmp.nySH6U620N + rm /tmp/tmp.wFoE4c2zk1 /tmp/tmp.nySH6U620N + return 0 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-9970 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.fXzzqiS8zm + cat /tmp/tmp.3qm7jV7JZU Error from server (NotFound): namespaces "users-9970" not found + rm /tmp/tmp.fXzzqiS8zm /tmp/tmp.3qm7jV7JZU + return 1 + : + wait_for_delete namespace/users-9970 + local res=namespace/users-9970 + echo -n 'waiting for namespace/users-9970 to be deleted' waiting for namespace/users-9970 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "users-9970" not found + desc 'create namespace users-9970' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-9970 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-9970 ++ mktemp + local LAST_OUT=/tmp/tmp.WRv83e0nhV ++ mktemp + local LAST_ERR=/tmp/tmp.tdQCj45Dmz + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-9970 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.WRv83e0nhV namespace/users-9970 created + cat /tmp/tmp.tdQCj45Dmz + rm /tmp/tmp.WRv83e0nhV /tmp/tmp.tdQCj45Dmz + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.HXsI6U8ccQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.TvgiAcah8i ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HXsI6U8ccQ ++ cat /tmp/tmp.TvgiAcah8i ++ rm /tmp/tmp.HXsI6U8ccQ /tmp/tmp.TvgiAcah8i ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2154-0538614f-6-cluster2 --namespace=users-9970 ++ mktemp + local LAST_OUT=/tmp/tmp.Tl08z3wwgG ++ mktemp + local LAST_ERR=/tmp/tmp.2i6FP19F0F + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2154-0538614f-6-cluster2 --namespace=users-9970 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Tl08z3wwgG Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2154-0538614f-6-cluster2" modified. 
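The create_namespace helper traced above (first for pxc-operator, here for users-9970) always runs the same sequence: sweep chaos-mesh leftovers, delete stale test namespaces, drop and re-create the target namespace, then point the kubectl context at it. A rough reconstruction from the trace follows; the helper names (destroy_chaos_mesh, wait_for_delete, desc) are taken from the trace, and the repeated "error: resource(s) were provided, but no name was specified" lines are just the no-op deletes that happen when the chaos-mesh greps or the namespace sweep return nothing:

create_namespace() {
    local namespace=$1
    destroy_chaos_mesh                                   # deletes chaos-mesh webhooks/CRDs/roles if any are found; no-ops otherwise
    desc 'cleaned up all old namespaces'
    kubectl_bin get ns \
        | egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' \
        | awk '{print $1}' \
        | xargs kubectl delete ns                        # remove leftover namespaces from earlier runs
    desc "cleaned up old namespaces $namespace"
    kubectl_bin delete namespace "$namespace" || :       # ignore NotFound on a fresh cluster
    wait_for_delete "namespace/$namespace"
    desc "create namespace $namespace"
    kubectl_bin create namespace "$namespace"
    kubectl_bin config set-context "$(kubectl_bin config current-context)" --namespace="$namespace"
}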
+ cat /tmp/tmp.2i6FP19F0F + rm /tmp/tmp.Tl08z3wwgG /tmp/tmp.2i6FP19F0F + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.G2dNnkhNAn ++ mktemp + local LAST_ERR=/tmp/tmp.iyLvjDuVwD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.G2dNnkhNAn secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.iyLvjDuVwD + rm /tmp/tmp.G2dNnkhNAn /tmp/tmp.iyLvjDuVwD + return 0 + desc 'create PXC cluster with 1-password secret' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster with 1-password secret ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/some-name.yml '' '' /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/secrets_one_pass.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/secrets_one_pass.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/secrets_one_pass.yml ++ mktemp + local LAST_OUT=/tmp/tmp.lwaayCjLGg ++ mktemp + local LAST_ERR=/tmp/tmp.ViPDlBPQ59 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/secrets_one_pass.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lwaayCjLGg secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.ViPDlBPQ59 + rm /tmp/tmp.lwaayCjLGg /tmp/tmp.ViPDlBPQ59 + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/client.yml + kubectl_bin apply -f - ++ mktemp + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/client.yml + local LAST_OUT=/tmp/tmp.yn6XlNWjeA + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-9970~ + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' ++ mktemp + local LAST_ERR=/tmp/tmp.kJ6ZQO3519 + local exit_status=0 + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' ++ seq 0 2 + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2154-0538614f#' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.yn6XlNWjeA deployment.apps/pxc-client created + cat /tmp/tmp.kJ6ZQO3519 + rm /tmp/tmp.yn6XlNWjeA /tmp/tmp.kJ6ZQO3519 + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_OUT=/tmp/tmp.VYNavfta2o + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-9970~ + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2154-0538614f#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' ++ mktemp + local LAST_ERR=/tmp/tmp.wmHrfVozv9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.VYNavfta2o perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.wmHrfVozv9 + rm /tmp/tmp.VYNavfta2o /tmp/tmp.wmHrfVozv9 + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.IQAsZ1bQ2a ++++ mktemp +++ local 
LAST_ERR=/tmp/tmp.P0LEZuqEsy +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.IQAsZ1bQ2a +++ cat /tmp/tmp.P0LEZuqEsy +++ rm /tmp/tmp.IQAsZ1bQ2a /tmp/tmp.P0LEZuqEsy +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.1tqbH1LnIU ++++ mktemp +++ local LAST_ERR=/tmp/tmp.W5daIcTkO6 +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.1tqbH1LnIU +++ cat /tmp/tmp.W5daIcTkO6 +++ rm /tmp/tmp.1tqbH1LnIU /tmp/tmp.W5daIcTkO6 +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-9970 ++ mktemp + local LAST_OUT=/tmp/tmp.rNcx15TlWt ++ mktemp + local LAST_ERR=/tmp/tmp.m8XINHDkAZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-9970 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-9970 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-9970 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.rNcx15TlWt + cat /tmp/tmp.m8XINHDkAZ error: no matching resources found + rm /tmp/tmp.rNcx15TlWt /tmp/tmp.m8XINHDkAZ + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ 
egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' ++ echo some-name-pxc-2 + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.jubqzmbKqO +++ mktemp ++ local LAST_ERR=/tmp/tmp.mI1BDXYXoZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jubqzmbKqO ++ cat /tmp/tmp.mI1BDXYXoZ ++ rm /tmp/tmp.jubqzmbKqO /tmp/tmp.mI1BDXYXoZ ++ return 0 + local 'root_pass=z@dL7!r_4p1d[K%tSfY' + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''z@dL7!r_4p1d[K%tSfY'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -p'\''z@dL7!r_4p1d[K%tSfY'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4P8qgT5CYf +++ mktemp ++ local LAST_ERR=/tmp/tmp.W3q3a1OnWG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4P8qgT5CYf ++ cat /tmp/tmp.W3q3a1OnWG ++ rm /tmp/tmp.4P8qgT5CYf /tmp/tmp.W3q3a1OnWG ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''z@dL7!r_4p1d[K%tSfY'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -p'\''z@dL7!r_4p1d[K%tSfY'\'' 
-P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.y8r1K7MnPu +++ mktemp ++ local LAST_ERR=/tmp/tmp.Vi4nNjp4cF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.y8r1K7MnPu ++ cat /tmp/tmp.Vi4nNjp4cF ++ rm /tmp/tmp.y8r1K7MnPu /tmp/tmp.Vi4nNjp4cF ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''z@dL7!r_4p1d[K%tSfY'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''z@dL7!r_4p1d[K%tSfY'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''z@dL7!r_4p1d[K%tSfY'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''z@dL7!r_4p1d[K%tSfY'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xNRd6gTlXV +++ mktemp ++ local LAST_ERR=/tmp/tmp.IBH6hZVlQ7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xNRd6gTlXV ++ cat /tmp/tmp.IBH6hZVlQ7 ++ rm /tmp/tmp.xNRd6gTlXV /tmp/tmp.IBH6hZVlQ7 ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.pBNSeZJ1Hi/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1.sql /tmp/tmp.pBNSeZJ1Hi/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''z@dL7!r_4p1d[K%tSfY'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''z@dL7!r_4p1d[K%tSfY'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''z@dL7!r_4p1d[K%tSfY'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''z@dL7!r_4p1d[K%tSfY'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XYvcZGQnJZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.PFe8oHEH8s ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XYvcZGQnJZ ++ cat /tmp/tmp.PFe8oHEH8s ++ rm /tmp/tmp.XYvcZGQnJZ /tmp/tmp.PFe8oHEH8s ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.pBNSeZJ1Hi/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1.sql /tmp/tmp.pBNSeZJ1Hi/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''z@dL7!r_4p1d[K%tSfY'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''z@dL7!r_4p1d[K%tSfY'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''z@dL7!r_4p1d[K%tSfY'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''z@dL7!r_4p1d[K%tSfY'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yKgzc5gBdd +++ mktemp ++ local LAST_ERR=/tmp/tmp.0hT2bAe8wI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yKgzc5gBdd ++ cat /tmp/tmp.0hT2bAe8wI ++ rm /tmp/tmp.yKgzc5gBdd /tmp/tmp.0hT2bAe8wI ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.pBNSeZJ1Hi/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-1.sql /tmp/tmp.pBNSeZJ1Hi/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pGQWABRKeG +++ mktemp ++ local LAST_ERR=/tmp/tmp.dPdPsbYpU1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pGQWABRKeG ++ cat /tmp/tmp.dPdPsbYpU1 Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.pGQWABRKeG /tmp/tmp.dPdPsbYpU1 ++ return 0 + '[' '' ']' + desc 'test missing passwords were created and present in internal secrets' + set +o xtrace ----------------------------------------------------------------------------------- test missing passwords were created and present in internal secrets ----------------------------------------------------------------------------------- + empty_pwds=() + wrong_pwds=() + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking root' Checking root ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.fJq4ZdEeaX +++ mktemp ++ local LAST_ERR=/tmp/tmp.lRbwRlRbwI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fJq4ZdEeaX ++ cat /tmp/tmp.lRbwRlRbwI ++ rm /tmp/tmp.fJq4ZdEeaX /tmp/tmp.lRbwRlRbwI ++ return 0 + secret_pass='z@dL7!r_4p1d[K%tSfY' ++ getSecretData internal-some-name root ++ local secretName=internal-some-name ++ local dataKey=root ++ kubectl_bin get secrets/internal-some-name '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.SdESW0BR13 +++ mktemp ++ local LAST_ERR=/tmp/tmp.nfwD8ao73d ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SdESW0BR13 ++ cat /tmp/tmp.nfwD8ao73d ++ rm /tmp/tmp.SdESW0BR13 /tmp/tmp.nfwD8ao73d ++ return 0 + int_secret_pass='z@dL7!r_4p1d[K%tSfY' + [[ -z z@dL7!r_4p1d[K%tSfY ]] + [[ z@dL7!r_4p1d[K%tSfY != \z\@\d\L\7\!\r\_\4\p\1\d\[\K\%\t\S\f\Y ]] + [[ root != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ root ]] + [[ '' =~ root ]] + echo 'Running compare for root' Running compare for root + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''z@dL7!r_4p1d[K%tSfY'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''z@dL7!r_4p1d[K%tSfY'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql ]] + 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''z@dL7!r_4p1d[K%tSfY'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''z@dL7!r_4p1d[K%tSfY'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hmIZ3BzxMU +++ mktemp ++ local LAST_ERR=/tmp/tmp.LoyMkFLztr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hmIZ3BzxMU ++ cat /tmp/tmp.LoyMkFLztr ++ rm /tmp/tmp.hmIZ3BzxMU /tmp/tmp.LoyMkFLztr ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.pBNSeZJ1Hi/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.pBNSeZJ1Hi/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking xtrabackup' Checking xtrabackup ++ getSecretData my-cluster-secrets xtrabackup ++ local secretName=my-cluster-secrets ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.L4RHhGwkVY +++ mktemp ++ local LAST_ERR=/tmp/tmp.bTb3vmnQdZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.L4RHhGwkVY ++ cat /tmp/tmp.bTb3vmnQdZ ++ rm /tmp/tmp.L4RHhGwkVY /tmp/tmp.bTb3vmnQdZ ++ return 0 + secret_pass='V9?dI1AX=4THJVN.Qlx^k ]] + [[ r>AX=4THJVN.Qlx^k != \r\>\A\X\=\4\T\H\J\V\N\.\Q\l\x\^\k ]] + [[ proxyadmin != \p\r\o\x\y\a\d\m\i\n ]] + [[ proxyadmin == \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ proxyadmin ]] + [[ '' =~ proxyadmin ]] + echo 'Running compare for proxyadmin' Running compare for proxyadmin + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''r>AX=4THJVN.Qlx^k'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''r>AX=4THJVN.Qlx^k'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''r>AX=4THJVN.Qlx^k'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''r>AX=4THJVN.Qlx^k'\''' + local pod=some-name-proxysql-0 + local 
container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.pBNSeZJ1Hi/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2.sql /tmp/tmp.pBNSeZJ1Hi/select-2.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking operator' Checking operator ++ getSecretData my-cluster-secrets operator ++ local secretName=my-cluster-secrets ++ local dataKey=operator ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.sJ5E4VmQ2H +++ mktemp ++ local LAST_ERR=/tmp/tmp.tIDeR4DS3d ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sJ5E4VmQ2H ++ cat /tmp/tmp.tIDeR4DS3d ++ rm /tmp/tmp.sJ5E4VmQ2H /tmp/tmp.tIDeR4DS3d ++ return 0 + secret_pass='!ysnvdY{US}FNLF]' ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ base64 --decode ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.asO8pBdIw1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.rQV1lsCVTE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.asO8pBdIw1 ++ cat /tmp/tmp.rQV1lsCVTE ++ rm /tmp/tmp.asO8pBdIw1 /tmp/tmp.rQV1lsCVTE ++ return 0 + int_secret_pass='!ysnvdY{US}FNLF]' + [[ -z !ysnvdY{US}FNLF] ]] + [[ !ysnvdY{US}FNLF] != \!\y\s\n\v\d\Y\{\U\S\}\F\N\L\F\] ]] + [[ operator != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ operator ]] + [[ '' =~ operator ]] + echo 'Running compare for operator' Running compare for operator + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''!ysnvdY{US}FNLF]'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''!ysnvdY{US}FNLF]'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''!ysnvdY{US}FNLF]'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''!ysnvdY{US}FNLF]'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SUMy2kESdf +++ mktemp ++ local LAST_ERR=/tmp/tmp.aZUUWlrLMr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SUMy2kESdf ++ cat /tmp/tmp.aZUUWlrLMr ++ rm /tmp/tmp.SUMy2kESdf /tmp/tmp.aZUUWlrLMr ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 
's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.pBNSeZJ1Hi/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.pBNSeZJ1Hi/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking replication' Checking replication ++ getSecretData my-cluster-secrets replication ++ local secretName=my-cluster-secrets ++ local dataKey=replication ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.b5Caxq9imD +++ mktemp ++ local LAST_ERR=/tmp/tmp.fpfVmne5Ly ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.b5Caxq9imD ++ cat /tmp/tmp.fpfVmne5Ly ++ rm /tmp/tmp.b5Caxq9imD /tmp/tmp.fpfVmne5Ly ++ return 0 + secret_pass='R4gSFc&ja#PKOa55^)' ++ getSecretData internal-some-name replication ++ local secretName=internal-some-name ++ local dataKey=replication ++ kubectl_bin get secrets/internal-some-name '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.k7eRF7BrLp +++ mktemp ++ local LAST_ERR=/tmp/tmp.w79imSDkPV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.k7eRF7BrLp ++ cat /tmp/tmp.w79imSDkPV ++ rm /tmp/tmp.k7eRF7BrLp /tmp/tmp.w79imSDkPV ++ return 0 + int_secret_pass='R4gSFc&ja#PKOa55^)' + [[ -z R4gSFc&ja#PKOa55^) ]] + [[ R4gSFc&ja#PKOa55^) != \R\4\g\S\F\c\&\j\a\#\P\K\O\a\5\5\^\) ]] + [[ replication != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ replication ]] + [[ '' =~ replication ]] + echo 'Running compare for replication' Running compare for replication + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''R4gSFc&ja#PKOa55^)'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''R4gSFc&ja#PKOa55^)'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''R4gSFc&ja#PKOa55^)'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''R4gSFc&ja#PKOa55^)'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lVRBBDEOuz +++ mktemp ++ local LAST_ERR=/tmp/tmp.lDEqIhgH65 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat 
/tmp/tmp.lVRBBDEOuz ++ cat /tmp/tmp.lDEqIhgH65 ++ rm /tmp/tmp.lVRBBDEOuz /tmp/tmp.lDEqIhgH65 ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.pBNSeZJ1Hi/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.pBNSeZJ1Hi/select-4.sql + [[ -n '' ]] + [[ -n '' ]] + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.M5EbgOTDif ++ mktemp + local LAST_ERR=/tmp/tmp.coU60lwRJ0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.M5EbgOTDif secret/my-cluster-secrets patched + cat /tmp/tmp.coU60lwRJ0 + rm /tmp/tmp.M5EbgOTDif /tmp/tmp.coU60lwRJ0 + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FdxFXKNkCn +++ mktemp ++ local LAST_ERR=/tmp/tmp.Nq14RcrlVf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FdxFXKNkCn ++ cat /tmp/tmp.Nq14RcrlVf ++ rm /tmp/tmp.FdxFXKNkCn /tmp/tmp.Nq14RcrlVf ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container 
"pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.pBNSeZJ1Hi/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.pBNSeZJ1Hi/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.KKsycbT3sa ++ mktemp + local LAST_ERR=/tmp/tmp.8ojzL39A3e + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.KKsycbT3sa perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.8ojzL39A3e + rm /tmp/tmp.KKsycbT3sa /tmp/tmp.8ojzL39A3e + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wwouCiUH2K +++ mktemp ++ local LAST_ERR=/tmp/tmp.1uEG1ZJXpO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wwouCiUH2K ++ cat /tmp/tmp.1uEG1ZJXpO ++ rm /tmp/tmp.wwouCiUH2K /tmp/tmp.1uEG1ZJXpO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.44zFRePaPd +++ mktemp ++ local LAST_ERR=/tmp/tmp.3KoW8v2204 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.44zFRePaPd ++ cat /tmp/tmp.3KoW8v2204 ++ rm /tmp/tmp.44zFRePaPd /tmp/tmp.3KoW8v2204 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Tx7bjYsaVA +++ mktemp ++ local LAST_ERR=/tmp/tmp.FizzAoU5s7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Tx7bjYsaVA ++ cat /tmp/tmp.FizzAoU5s7 ++ rm /tmp/tmp.Tx7bjYsaVA /tmp/tmp.FizzAoU5s7 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.BeDAWVaB4X ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ygYsUdYmiX +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.BeDAWVaB4X +++++ cat /tmp/tmp.ygYsUdYmiX +++++ rm /tmp/tmp.BeDAWVaB4X /tmp/tmp.ygYsUdYmiX +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.qApY2VEwLo ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.HRJnAhlM5m +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.qApY2VEwLo +++++ cat /tmp/tmp.HRJnAhlM5m +++++ rm /tmp/tmp.qApY2VEwLo /tmp/tmp.HRJnAhlM5m +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.b0qd9d1zL9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ih2z4xm5gm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.b0qd9d1zL9 ++ cat /tmp/tmp.Ih2z4xm5gm ++ rm /tmp/tmp.b0qd9d1zL9 /tmp/tmp.Ih2z4xm5gm ++ return 0 + [[ 3 == \3 ]] + echo + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.7F2PoQC0cu ++ mktemp + local LAST_ERR=/tmp/tmp.PXjaWuqksi + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7F2PoQC0cu secret/my-cluster-secrets patched + cat /tmp/tmp.PXjaWuqksi + rm /tmp/tmp.7F2PoQC0cu /tmp/tmp.PXjaWuqksi + return 0 + 
sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xJPtxEePg2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.hegVGuMTWl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xJPtxEePg2 ++ cat /tmp/tmp.hegVGuMTWl ++ rm /tmp/tmp.xJPtxEePg2 /tmp/tmp.hegVGuMTWl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RMZ6AuqCHJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.20cZ7rxj7M ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RMZ6AuqCHJ ++ cat /tmp/tmp.20cZ7rxj7M ++ rm /tmp/tmp.RMZ6AuqCHJ /tmp/tmp.20cZ7rxj7M ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cO0uAbynxQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.gRSmIXa3og ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cO0uAbynxQ ++ cat /tmp/tmp.gRSmIXa3og ++ rm /tmp/tmp.cO0uAbynxQ /tmp/tmp.gRSmIXa3og ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WVc8ycnkqI +++ mktemp ++ local LAST_ERR=/tmp/tmp.A3YAVtBD2N ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WVc8ycnkqI ++ cat /tmp/tmp.A3YAVtBD2N ++ rm /tmp/tmp.WVc8ycnkqI /tmp/tmp.A3YAVtBD2N ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9sZPzkQM5e +++ mktemp ++ local LAST_ERR=/tmp/tmp.n8ZigM4YBs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9sZPzkQM5e ++ cat /tmp/tmp.n8ZigM4YBs ++ rm /tmp/tmp.9sZPzkQM5e /tmp/tmp.n8ZigM4YBs ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
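Note on the surrounding trace: the runs of ".+ sleep 5" here are iterations of the wait_cluster_consistency helper polling the custom resource. A minimal sketch of what the trace shows it doing (status polling, then size checks, with the proxy engine detected from the spec); the kubectl_bin wrapper is replaced by plain kubectl and the behaviour on a size mismatch is simplified, so anything not visible in the trace is an assumption:

wait_cluster_consistency() {
    local cluster_name=$1 cluster_size=$2 proxy_size=${3:-$2}
    local i=0 max=300
    sleep 7
    echo -n "waiting for pxc/${cluster_name} to be ready"
    # poll .status.state until the operator reports the cluster ready
    while [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.state}') != "ready" ]]; do
        echo -n .
        sleep 5
        [[ $i -ge $max ]] && return 1
        let i+=1
    done
    # the number of ready PXC pods must match the requested size
    [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.pxc.ready}') == "$cluster_size" ]] || return 1
    # proxy engine detection: haproxy if .spec.haproxy.enabled is true, otherwise proxysql
    local proxy=proxysql
    [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.spec.haproxy.enabled}') == "true" ]] && proxy=haproxy
    [[ $(kubectl get pxc "$cluster_name" -o "jsonpath={.status.${proxy}.ready}") == "$proxy_size" ]] || return 1
    echo
}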
.+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VuKuIsSPAR +++ mktemp ++ local LAST_ERR=/tmp/tmp.cg6RvLiHS5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VuKuIsSPAR ++ cat /tmp/tmp.cg6RvLiHS5 ++ rm /tmp/tmp.VuKuIsSPAR /tmp/tmp.cg6RvLiHS5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XLbjaaVIET +++ mktemp ++ local LAST_ERR=/tmp/tmp.jVjL11PXRT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XLbjaaVIET ++ cat /tmp/tmp.jVjL11PXRT ++ rm /tmp/tmp.XLbjaaVIET /tmp/tmp.jVjL11PXRT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4dm8HXz1Yn +++ mktemp ++ local LAST_ERR=/tmp/tmp.wWX5ImitMI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4dm8HXz1Yn ++ cat /tmp/tmp.wWX5ImitMI ++ rm /tmp/tmp.4dm8HXz1Yn /tmp/tmp.wWX5ImitMI ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5cllyzJlvc +++ mktemp ++ local LAST_ERR=/tmp/tmp.5I6aNagk5v ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5cllyzJlvc ++ cat /tmp/tmp.5I6aNagk5v ++ rm /tmp/tmp.5cllyzJlvc /tmp/tmp.5I6aNagk5v ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.vAbJOXlA20 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Qf8tLwF1Ek +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.vAbJOXlA20 +++++ cat /tmp/tmp.Qf8tLwF1Ek +++++ rm /tmp/tmp.vAbJOXlA20 /tmp/tmp.Qf8tLwF1Ek +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.DN21yZZwqX ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.AeaEWdIFPE +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.DN21yZZwqX +++++ cat /tmp/tmp.AeaEWdIFPE +++++ rm /tmp/tmp.DN21yZZwqX /tmp/tmp.AeaEWdIFPE +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local 
LAST_OUT=/tmp/tmp.YIcAkRJrkt +++ mktemp ++ local LAST_ERR=/tmp/tmp.In3kB1BmNA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YIcAkRJrkt ++ cat /tmp/tmp.In3kB1BmNA ++ rm /tmp/tmp.YIcAkRJrkt /tmp/tmp.In3kB1BmNA ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.pBNSeZJ1Hi/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2.sql /tmp/tmp.pBNSeZJ1Hi/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.pBNSeZJ1Hi/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2.sql /tmp/tmp.pBNSeZJ1Hi/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.pBNSeZJ1Hi/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-2.sql /tmp/tmp.pBNSeZJ1Hi/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.UGjSmlgYkZ ++ mktemp + local LAST_ERR=/tmp/tmp.ulmI0ZeGTs + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.UGjSmlgYkZ perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.ulmI0ZeGTs + rm /tmp/tmp.UGjSmlgYkZ /tmp/tmp.ulmI0ZeGTs + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ofJx8KJM59 ++ mktemp + local LAST_ERR=/tmp/tmp.Q25TK18tpY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ofJx8KJM59 secret/my-cluster-secrets patched + cat /tmp/tmp.Q25TK18tpY + rm /tmp/tmp.ofJx8KJM59 /tmp/tmp.Q25TK18tpY + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ph4z4YXt7i +++ mktemp ++ local LAST_ERR=/tmp/tmp.DzI68lfPsL ++ local exit_status=0 +++ seq 0 2 ++ for i in 
'$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ph4z4YXt7i ++ cat /tmp/tmp.DzI68lfPsL ++ rm /tmp/tmp.ph4z4YXt7i /tmp/tmp.DzI68lfPsL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JkOO7wtiI4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.TNy8AuMi4z ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JkOO7wtiI4 ++ cat /tmp/tmp.TNy8AuMi4z ++ rm /tmp/tmp.JkOO7wtiI4 /tmp/tmp.TNy8AuMi4z ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.95atLkN2t1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.dhbvWX5O3c ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.95atLkN2t1 ++ cat /tmp/tmp.dhbvWX5O3c ++ rm /tmp/tmp.95atLkN2t1 /tmp/tmp.dhbvWX5O3c ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZQ5CYIO96j +++ mktemp ++ local LAST_ERR=/tmp/tmp.mfBYFfzhYL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZQ5CYIO96j ++ cat /tmp/tmp.mfBYFfzhYL ++ rm /tmp/tmp.ZQ5CYIO96j /tmp/tmp.mfBYFfzhYL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nZBSN80w1b +++ mktemp ++ local LAST_ERR=/tmp/tmp.HrPj6qItU5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nZBSN80w1b ++ cat /tmp/tmp.HrPj6qItU5 ++ rm /tmp/tmp.nZBSN80w1b /tmp/tmp.HrPj6qItU5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zFcL80TGPz +++ mktemp ++ local LAST_ERR=/tmp/tmp.TI5ACXMAeF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zFcL80TGPz ++ cat /tmp/tmp.TI5ACXMAeF ++ rm /tmp/tmp.zFcL80TGPz /tmp/tmp.TI5ACXMAeF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
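Note on the surrounding trace: each password-rotation step in this log (root, proxyadmin and xtrabackup above, monitor below) goes through the same patch_secret helper, which patches one key of the my-cluster-secrets Secret with a base64-encoded value and then lets the operator reconcile. The helper body is visible in the trace and the sketch below only restates it; dGVzdC1wYXNzd29yZA== is simply base64 for test-password:

patch_secret() {
    local secret=$1 key=$2 value=$3   # value must already be base64-encoded
    kubectl patch secret "$secret" -p="{\"data\":{\"${key}\": \"${value}\"}}"
}

# usage matching the xtrabackup step above
patch_secret my-cluster-secrets xtrabackup "$(echo -n test-password | base64)"
sleep 15    # the test pauses before checking that the operator picked the change up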
.+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BtiTlxa8Rn +++ mktemp ++ local LAST_ERR=/tmp/tmp.DJ9h4dHk5K ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BtiTlxa8Rn ++ cat /tmp/tmp.DJ9h4dHk5K ++ rm /tmp/tmp.BtiTlxa8Rn /tmp/tmp.DJ9h4dHk5K ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fcgKy3Nop7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.XoDyYdsPwb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fcgKy3Nop7 ++ cat /tmp/tmp.XoDyYdsPwb ++ rm /tmp/tmp.fcgKy3Nop7 /tmp/tmp.XoDyYdsPwb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9bq2E0iMPs +++ mktemp ++ local LAST_ERR=/tmp/tmp.hL64bNiBwY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9bq2E0iMPs ++ cat /tmp/tmp.hL64bNiBwY ++ rm /tmp/tmp.9bq2E0iMPs /tmp/tmp.hL64bNiBwY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PxcIksN9Ud +++ mktemp ++ local LAST_ERR=/tmp/tmp.ScEBcSdkZM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PxcIksN9Ud ++ cat /tmp/tmp.ScEBcSdkZM ++ rm /tmp/tmp.PxcIksN9Ud /tmp/tmp.ScEBcSdkZM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ers0miQS5p +++ mktemp ++ local LAST_ERR=/tmp/tmp.YNBi1CHqqs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ers0miQS5p ++ cat /tmp/tmp.YNBi1CHqqs ++ rm /tmp/tmp.Ers0miQS5p /tmp/tmp.YNBi1CHqqs ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Q5yGnt2qPi +++ mktemp ++ local LAST_ERR=/tmp/tmp.XKJ8KdlWnT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Q5yGnt2qPi ++ cat /tmp/tmp.XKJ8KdlWnT ++ rm /tmp/tmp.Q5yGnt2qPi /tmp/tmp.XKJ8KdlWnT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3GjzJEfx2r +++ mktemp ++ local LAST_ERR=/tmp/tmp.SMVOWtkzdE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3GjzJEfx2r ++ cat /tmp/tmp.SMVOWtkzdE ++ rm /tmp/tmp.3GjzJEfx2r /tmp/tmp.SMVOWtkzdE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zIAXs0VqQm +++ mktemp ++ local LAST_ERR=/tmp/tmp.38RjStxrSh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zIAXs0VqQm ++ cat /tmp/tmp.38RjStxrSh ++ rm /tmp/tmp.zIAXs0VqQm /tmp/tmp.38RjStxrSh ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hs4W00je75 +++ mktemp ++ local LAST_ERR=/tmp/tmp.a3rXUIg50J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hs4W00je75 ++ cat /tmp/tmp.a3rXUIg50J ++ rm /tmp/tmp.hs4W00je75 /tmp/tmp.a3rXUIg50J ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bS9ujEvw3O +++ mktemp ++ local LAST_ERR=/tmp/tmp.WVuAWyFd9u ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bS9ujEvw3O ++ cat /tmp/tmp.WVuAWyFd9u ++ rm /tmp/tmp.bS9ujEvw3O /tmp/tmp.WVuAWyFd9u ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CUZ7HaeAiY +++ mktemp ++ local LAST_ERR=/tmp/tmp.1E2UXjcEBU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CUZ7HaeAiY ++ cat /tmp/tmp.1E2UXjcEBU ++ rm /tmp/tmp.CUZ7HaeAiY /tmp/tmp.1E2UXjcEBU ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XFg7JZxhHc +++ mktemp ++ local LAST_ERR=/tmp/tmp.gil5ZdZQvM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XFg7JZxhHc ++ cat /tmp/tmp.gil5ZdZQvM ++ rm /tmp/tmp.XFg7JZxhHc /tmp/tmp.gil5ZdZQvM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
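Note on the surrounding trace: the mktemp / LAST_OUT / LAST_ERR / "seq 0 2" pattern wrapped around every kubectl call in this log comes from the kubectl_bin helper, which retries a command up to three times and keeps stdout and stderr in temporary files. A minimal sketch consistent with the trace; the back-off between failed attempts is not visible here and is an assumption:

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        # keep retrying only while kubectl fails
        if [ $exit_status != 0 ]; then
            sleep 1    # assumed back-off, not shown in the trace
            continue
        fi
        break
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR" >&2
    rm "$LAST_OUT" "$LAST_ERR"
    return $exit_status
}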
.+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mgP3js68mp +++ mktemp ++ local LAST_ERR=/tmp/tmp.LzHxHGvC3A ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mgP3js68mp ++ cat /tmp/tmp.LzHxHGvC3A ++ rm /tmp/tmp.mgP3js68mp /tmp/tmp.LzHxHGvC3A ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ChWFv6HdJl +++ mktemp ++ local LAST_ERR=/tmp/tmp.qEkSdJxIAh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ChWFv6HdJl ++ cat /tmp/tmp.qEkSdJxIAh ++ rm /tmp/tmp.ChWFv6HdJl /tmp/tmp.qEkSdJxIAh ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FfP6KSX4nR +++ mktemp ++ local LAST_ERR=/tmp/tmp.oLVqlWFj2A ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FfP6KSX4nR ++ cat /tmp/tmp.oLVqlWFj2A ++ rm /tmp/tmp.FfP6KSX4nR /tmp/tmp.oLVqlWFj2A ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7QnqwED8ZF +++ mktemp ++ local LAST_ERR=/tmp/tmp.HgpGqh5Rwc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7QnqwED8ZF ++ cat /tmp/tmp.HgpGqh5Rwc ++ rm /tmp/tmp.7QnqwED8ZF /tmp/tmp.HgpGqh5Rwc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
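Note on the surrounding trace: the select-2 checks against each proxysql pod above, and the select-3 / select-4 checks elsewhere in this log, all follow the same compare pattern: run a statement through the pxc-client pod (or directly in a target container for the _local variant), save the output under the run's temp directory, and diff it against a versioned expected file. A rough sketch under those assumptions; the exact mysql invocation inside run_mysql is not shown in the trace, and the paths below are placeholders:

run_mysql() {
    local command=$1 uri=$2
    local client_pod
    client_pod=$(kubectl get pods --selector=name=pxc-client \
        -o 'jsonpath={.items[].metadata.name}')
    # assumed invocation: re-parse $uri inside the pod so the quoted password survives
    kubectl exec "$client_pod" -- bash -c "mysql -sN $uri -e \"$command\""
}

compare_mysql_cmd() {
    local command_id=$1 command=$2 uri=$3
    local compare_dir=./e2e-tests/users/compare      # placeholder for the repo path
    local tmp_dir=${TMP_DIR:-/tmp}                   # placeholder for the per-run mktemp dir
    local expected=${compare_dir}/${command_id}.sql
    # this run uses an 8.0 image, so the 8.0-specific expected file is preferred when present
    [[ -f ${compare_dir}/${command_id}-80.sql ]] && expected=${compare_dir}/${command_id}-80.sql
    run_mysql "$command" "$uri" >"${tmp_dir}/${command_id}.sql"
    diff -u "$expected" "${tmp_dir}/${command_id}.sql"
}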
.+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PqDcs3XOXC +++ mktemp ++ local LAST_ERR=/tmp/tmp.Z1mK2zVQaF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PqDcs3XOXC ++ cat /tmp/tmp.Z1mK2zVQaF ++ rm /tmp/tmp.PqDcs3XOXC /tmp/tmp.Z1mK2zVQaF ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mEduyhHv4k +++ mktemp ++ local LAST_ERR=/tmp/tmp.OKxNo2wjL1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mEduyhHv4k ++ cat /tmp/tmp.OKxNo2wjL1 ++ rm /tmp/tmp.mEduyhHv4k /tmp/tmp.OKxNo2wjL1 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.uiNOlFTTz5 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.uP2lZH4m00 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.uiNOlFTTz5 +++++ cat /tmp/tmp.uP2lZH4m00 +++++ rm /tmp/tmp.uiNOlFTTz5 /tmp/tmp.uP2lZH4m00 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.iUh5vR5h3F ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.cSLZ46S4Dg +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.iUh5vR5h3F +++++ cat /tmp/tmp.cSLZ46S4Dg +++++ rm /tmp/tmp.iUh5vR5h3F /tmp/tmp.cSLZ46S4Dg +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.N0wqrHOpbz +++ mktemp ++ local LAST_ERR=/tmp/tmp.i0K6KxZlzc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.N0wqrHOpbz ++ cat /tmp/tmp.i0K6KxZlzc ++ rm /tmp/tmp.N0wqrHOpbz /tmp/tmp.i0K6KxZlzc ++ return 0 + [[ 2 == \2 ]] + echo + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-3-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW 
DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' -s /tmp/tmp.pBNSeZJ1Hi/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-3.sql /tmp/tmp.pBNSeZJ1Hi/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Xw6OmbsfzY ++ mktemp + local LAST_ERR=/tmp/tmp.gLOrXtlIxo + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Xw6OmbsfzY secret/my-cluster-secrets patched + cat /tmp/tmp.gLOrXtlIxo + rm /tmp/tmp.Xw6OmbsfzY /tmp/tmp.gLOrXtlIxo + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.UpkvxA63qX +++ mktemp ++ local LAST_ERR=/tmp/tmp.Vmsd1SIwbL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UpkvxA63qX ++ cat /tmp/tmp.Vmsd1SIwbL ++ rm /tmp/tmp.UpkvxA63qX /tmp/tmp.Vmsd1SIwbL ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + retry=0 + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep additional_password + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7PPzA4LXLb +++ mktemp ++ local LAST_ERR=/tmp/tmp.sjbPjpgKJH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7PPzA4LXLb ++ cat /tmp/tmp.sjbPjpgKJH ++ rm /tmp/tmp.7PPzA4LXLb /tmp/tmp.sjbPjpgKJH ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting 
for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace {"additional_password": "*C1F414D9BAF378B656A849B31F9F8AF3125F558B"} + retry=0 + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.akboxy8csc +++ mktemp ++ local LAST_ERR=/tmp/tmp.FTpp0tsnaO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.akboxy8csc ++ cat /tmp/tmp.FTpp0tsnaO ++ rm /tmp/tmp.akboxy8csc /tmp/tmp.FTpp0tsnaO ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 1 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.n8N1tsB5z1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.YA0QGdVbpK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.n8N1tsB5z1 ++ cat /tmp/tmp.YA0QGdVbpK ++ rm /tmp/tmp.n8N1tsB5z1 /tmp/tmp.YA0QGdVbpK ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 2 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot 
-p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HgdVvlcH8p +++ mktemp ++ local LAST_ERR=/tmp/tmp.wqhUtOwCnj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HgdVvlcH8p ++ cat /tmp/tmp.wqhUtOwCnj ++ rm /tmp/tmp.HgdVvlcH8p /tmp/tmp.wqhUtOwCnj ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 3 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iFIRE1IMiq +++ mktemp ++ local LAST_ERR=/tmp/tmp.sWRXGmpajo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iFIRE1IMiq ++ cat /tmp/tmp.sWRXGmpajo ++ rm /tmp/tmp.iFIRE1IMiq /tmp/tmp.sWRXGmpajo ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 4 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods 
--selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wPulpJRUGI +++ mktemp ++ local LAST_ERR=/tmp/tmp.DWshiR0izS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wPulpJRUGI ++ cat /tmp/tmp.DWshiR0izS ++ rm /tmp/tmp.wPulpJRUGI /tmp/tmp.DWshiR0izS ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 5 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.25ZdIQZgxZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.IEXr2rDT7t ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.25ZdIQZgxZ ++ cat /tmp/tmp.IEXr2rDT7t ++ rm /tmp/tmp.25ZdIQZgxZ /tmp/tmp.IEXr2rDT7t ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 6 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UYtVzCnbVt +++ mktemp ++ local LAST_ERR=/tmp/tmp.2WAzsjhEqg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 
'!=' 0 ']' ++ break ++ cat /tmp/tmp.UYtVzCnbVt ++ cat /tmp/tmp.2WAzsjhEqg ++ rm /tmp/tmp.UYtVzCnbVt /tmp/tmp.2WAzsjhEqg ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 7 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.L5mM6cj9uC +++ mktemp ++ local LAST_ERR=/tmp/tmp.EfQ3ZyGwbj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.L5mM6cj9uC ++ cat /tmp/tmp.EfQ3ZyGwbj ++ rm /tmp/tmp.L5mM6cj9uC /tmp/tmp.EfQ3ZyGwbj ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 8 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SZ1vRxO7MP +++ mktemp ++ local LAST_ERR=/tmp/tmp.GQoo8LdEn0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SZ1vRxO7MP ++ cat /tmp/tmp.GQoo8LdEn0 ++ rm /tmp/tmp.SZ1vRxO7MP /tmp/tmp.GQoo8LdEn0 ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 
's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 9 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.S1WBKjZqOA +++ mktemp ++ local LAST_ERR=/tmp/tmp.jTR5c8cnHy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.S1WBKjZqOA ++ cat /tmp/tmp.jTR5c8cnHy ++ rm /tmp/tmp.S1WBKjZqOA /tmp/tmp.jTR5c8cnHy ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 10 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RheYEeOekb +++ mktemp ++ local LAST_ERR=/tmp/tmp.hYbhCjnIEB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RheYEeOekb ++ cat /tmp/tmp.hYbhCjnIEB ++ rm /tmp/tmp.RheYEeOekb /tmp/tmp.hYbhCjnIEB ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + 
sleep 1 + let retry+=1 + [[ 11 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FUQRcN6Txn +++ mktemp ++ local LAST_ERR=/tmp/tmp.mto5ljpNvW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FUQRcN6Txn ++ cat /tmp/tmp.mto5ljpNvW ++ rm /tmp/tmp.FUQRcN6Txn /tmp/tmp.mto5ljpNvW ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 12 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8xnCjWDwWr +++ mktemp ++ local LAST_ERR=/tmp/tmp.63QXsjRJ4m ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8xnCjWDwWr ++ cat /tmp/tmp.63QXsjRJ4m ++ rm /tmp/tmp.8xnCjWDwWr /tmp/tmp.63QXsjRJ4m ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 13 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' 
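Note on the surrounding trace: the repeated "waiting for password propagation" blocks here come from wait_for_password_propagation. With an 8.0 image the monitor password appears to be rotated using MySQL dual passwords, so mysql.user User_attributes first shows the previous credential as additional_password (is_password_updated) and the test then loops until that attribute is discarded and the column reads NULL (is_old_password_discarded). A minimal sketch reconstructed from the trace, reusing the run_mysql helper sketched earlier; retry handling is simplified and anything not visible above is an assumption:

is_password_updated() {        # the old password shows up as "additional_password"
    run_mysql "SELECT User_attributes FROM mysql.user WHERE user='$1'" "$2" | grep additional_password
}

is_old_password_discarded() {  # once discarded, User_attributes reads NULL
    run_mysql "SELECT User_attributes FROM mysql.user WHERE user='$1'" "$2" | grep NULL
}

wait_for_password_propagation() {
    local secret=$1 user=$2 max_retry=240 retry=0
    local root_pass
    root_pass=$(kubectl get "secrets/${secret}" --template='{{.data.root}}' | base64 --decode)
    local uri="-h some-name-pxc -uroot -p'${root_pass}'"   # host name as used in this run
    is_password_updated "$user" "$uri"
    until is_old_password_discarded "$user" "$uri"; do
        echo 'waiting for password propagation'
        sleep 1
        let retry+=1
        [[ $retry -ge $max_retry ]] && return 1
    done
}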
+ local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tlqY0ZOV1f +++ mktemp ++ local LAST_ERR=/tmp/tmp.tjRiZIR11t ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tlqY0ZOV1f ++ cat /tmp/tmp.tjRiZIR11t ++ rm /tmp/tmp.tlqY0ZOV1f /tmp/tmp.tjRiZIR11t ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 14 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TVI7ekDFBF +++ mktemp ++ local LAST_ERR=/tmp/tmp.H8aKKUMubD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TVI7ekDFBF ++ cat /tmp/tmp.H8aKKUMubD ++ rm /tmp/tmp.TVI7ekDFBF /tmp/tmp.H8aKKUMubD ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 15 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.848sYamCCh +++ mktemp ++ local LAST_ERR=/tmp/tmp.zwf1Rh7QSs ++ 
local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.848sYamCCh ++ cat /tmp/tmp.zwf1Rh7QSs ++ rm /tmp/tmp.848sYamCCh /tmp/tmp.zwf1Rh7QSs ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 16 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WZezSLxseK +++ mktemp ++ local LAST_ERR=/tmp/tmp.UgWRdoFUTA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WZezSLxseK ++ cat /tmp/tmp.UgWRdoFUTA ++ rm /tmp/tmp.WZezSLxseK /tmp/tmp.UgWRdoFUTA ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace NULL + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dBSXCzEBV4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.9hPMoI10NO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dBSXCzEBV4 ++ cat /tmp/tmp.9hPMoI10NO ++ rm /tmp/tmp.dBSXCzEBV4 /tmp/tmp.9hPMoI10NO ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uKcvwmowW1 +++ mktemp ++ local 
LAST_ERR=/tmp/tmp.aMxKR4Qhgr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uKcvwmowW1 ++ cat /tmp/tmp.aMxKR4Qhgr ++ rm /tmp/tmp.uKcvwmowW1 /tmp/tmp.aMxKR4Qhgr ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.2KznC2w8Mb ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.nG2e8qDlVq +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.2KznC2w8Mb +++++ cat /tmp/tmp.nG2e8qDlVq +++++ rm /tmp/tmp.2KznC2w8Mb /tmp/tmp.nG2e8qDlVq +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.iVeTRg5gnp ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.YYP857HKOz +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.iVeTRg5gnp +++++ cat /tmp/tmp.YYP857HKOz +++++ rm /tmp/tmp.iVeTRg5gnp /tmp/tmp.YYP857HKOz +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xTzbm5P3wY +++ mktemp ++ local LAST_ERR=/tmp/tmp.HWF4VJ6Sme ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xTzbm5P3wY ++ cat /tmp/tmp.HWF4VJ6Sme ++ rm /tmp/tmp.xTzbm5P3wY /tmp/tmp.HWF4VJ6Sme ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OiKPQJ4hEq +++ mktemp ++ local LAST_ERR=/tmp/tmp.8gdV2NpLAO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OiKPQJ4hEq ++ cat 
/tmp/tmp.8gdV2NpLAO ++ rm /tmp/tmp.OiKPQJ4hEq /tmp/tmp.8gdV2NpLAO ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.pBNSeZJ1Hi/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.pBNSeZJ1Hi/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.o4BorjZkhs ++ mktemp + local LAST_ERR=/tmp/tmp.e0UqvFqZr2 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.o4BorjZkhs secret/my-cluster-secrets patched + cat /tmp/tmp.e0UqvFqZr2 + rm /tmp/tmp.o4BorjZkhs /tmp/tmp.e0UqvFqZr2 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gOkIu9UGiF +++ mktemp ++ local LAST_ERR=/tmp/tmp.auFL6t7nSO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gOkIu9UGiF ++ cat /tmp/tmp.auFL6t7nSO ++ rm /tmp/tmp.gOkIu9UGiF /tmp/tmp.auFL6t7nSO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BkXo7ShAp4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.uM9twMLqHv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BkXo7ShAp4 ++ cat /tmp/tmp.uM9twMLqHv ++ rm /tmp/tmp.BkXo7ShAp4 /tmp/tmp.uM9twMLqHv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
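[editor's note] wait_cluster_consistency, traced in full above, is the gate the test runs after every credential change: it polls the custom resource until .status.state reports ready, then checks that the ready replica counts match the expected PXC and proxy sizes. A condensed sketch built from the fields in the trace; the real helper also keeps looping while the counts have not caught up and picks the proxy field via get_proxy_engine:

    # condensed sketch; field paths and limits taken from the trace
    wait_cluster_consistency() {
        local cluster_name=$1 cluster_size=$2 proxy_size=$3
        local i=0 max=300
        sleep 7
        until [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.state}') == "ready" ]]; do
            echo -n .
            sleep 5
            [[ $i -ge $max ]] && return 1
            let i+=1
        done
        [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.pxc.ready}') == "$cluster_size" ]] &&
            [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.proxysql.ready}') == "$proxy_size" ]]
    }

    wait_cluster_consistency some-name 3 2   # 3 PXC members, 2 ProxySQL pods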
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YwnPxnMH3H +++ mktemp ++ local LAST_ERR=/tmp/tmp.c8GtDB9G9w ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YwnPxnMH3H ++ cat /tmp/tmp.c8GtDB9G9w ++ rm /tmp/tmp.YwnPxnMH3H /tmp/tmp.c8GtDB9G9w ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4sQ7QGaU3S +++ mktemp ++ local LAST_ERR=/tmp/tmp.hD9gFUtKWq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4sQ7QGaU3S ++ cat /tmp/tmp.hD9gFUtKWq ++ rm /tmp/tmp.4sQ7QGaU3S /tmp/tmp.hD9gFUtKWq ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.KOrib4oaD8 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.1iWlyaB6Bb +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.KOrib4oaD8 +++++ cat /tmp/tmp.1iWlyaB6Bb +++++ rm /tmp/tmp.KOrib4oaD8 /tmp/tmp.1iWlyaB6Bb +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.neD2cor2QA ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.DoN9SKyX0S +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.neD2cor2QA +++++ cat /tmp/tmp.DoN9SKyX0S +++++ rm /tmp/tmp.neD2cor2QA /tmp/tmp.DoN9SKyX0S +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OsKn0C7AE9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.AmfaxBTesn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OsKn0C7AE9 ++ cat /tmp/tmp.AmfaxBTesn ++ rm /tmp/tmp.OsKn0C7AE9 /tmp/tmp.AmfaxBTesn ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql ]] + 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.K3i69x9JWo +++ mktemp ++ local LAST_ERR=/tmp/tmp.ugcoZdG7Q0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.K3i69x9JWo ++ cat /tmp/tmp.ugcoZdG7Q0 ++ rm /tmp/tmp.K3i69x9JWo /tmp/tmp.ugcoZdG7Q0 ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.pBNSeZJ1Hi/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.pBNSeZJ1Hi/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.O2lV0t8nef ++ mktemp + local LAST_ERR=/tmp/tmp.c3F9CKFhGc + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.O2lV0t8nef perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.c3F9CKFhGc + rm /tmp/tmp.O2lV0t8nef /tmp/tmp.c3F9CKFhGc + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Btg6shYM7G +++ mktemp ++ local LAST_ERR=/tmp/tmp.V7jh1fESYL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Btg6shYM7G ++ cat /tmp/tmp.V7jh1fESYL ++ rm /tmp/tmp.Btg6shYM7G /tmp/tmp.V7jh1fESYL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
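[editor's note] Pointing spec.secretsName at my-cluster-secrets-2 above makes the operator reconcile every system user against the new Secret, which is why the cluster falls back to initializing for the long stretch that follows. The patch, plus a quick probe (same jsonpath style the test itself uses) to confirm the CR now references the new secret:

    kubectl patch pxc some-name --type merge \
        --patch '{"spec": {"secretsName": "my-cluster-secrets-2"}}'
    kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}'   # expect: my-cluster-secrets-2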
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HPlVzoYDBo +++ mktemp ++ local LAST_ERR=/tmp/tmp.C6XY9Ja1L2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HPlVzoYDBo ++ cat /tmp/tmp.C6XY9Ja1L2 ++ rm /tmp/tmp.HPlVzoYDBo /tmp/tmp.C6XY9Ja1L2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9QVjk9jxjR +++ mktemp ++ local LAST_ERR=/tmp/tmp.Pd7XCm8Ejw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9QVjk9jxjR ++ cat /tmp/tmp.Pd7XCm8Ejw ++ rm /tmp/tmp.9QVjk9jxjR /tmp/tmp.Pd7XCm8Ejw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4duoZSb14G +++ mktemp ++ local LAST_ERR=/tmp/tmp.u32HxHjGvX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4duoZSb14G ++ cat /tmp/tmp.u32HxHjGvX ++ rm /tmp/tmp.4duoZSb14G /tmp/tmp.u32HxHjGvX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.a3cFiJm9Wn +++ mktemp ++ local LAST_ERR=/tmp/tmp.zJXP3VL7Dc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.a3cFiJm9Wn ++ cat /tmp/tmp.zJXP3VL7Dc ++ rm /tmp/tmp.a3cFiJm9Wn /tmp/tmp.zJXP3VL7Dc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.O9148VSPu4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.tNOxdNwTIq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.O9148VSPu4 ++ cat /tmp/tmp.tNOxdNwTIq ++ rm /tmp/tmp.O9148VSPu4 /tmp/tmp.tNOxdNwTIq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UJimkhOV1y +++ mktemp ++ local LAST_ERR=/tmp/tmp.c5jOlpZxSB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UJimkhOV1y ++ cat /tmp/tmp.c5jOlpZxSB ++ rm /tmp/tmp.UJimkhOV1y /tmp/tmp.c5jOlpZxSB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Pc5LXkBSiV +++ mktemp ++ local LAST_ERR=/tmp/tmp.asRuG672Ki ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Pc5LXkBSiV ++ cat /tmp/tmp.asRuG672Ki ++ rm /tmp/tmp.Pc5LXkBSiV /tmp/tmp.asRuG672Ki ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.benX6uqR8z +++ mktemp ++ local LAST_ERR=/tmp/tmp.zzsl8Fs04O ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.benX6uqR8z ++ cat /tmp/tmp.zzsl8Fs04O ++ rm /tmp/tmp.benX6uqR8z /tmp/tmp.zzsl8Fs04O ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AazPVIddae +++ mktemp ++ local LAST_ERR=/tmp/tmp.p6zNatILsh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AazPVIddae ++ cat /tmp/tmp.p6zNatILsh ++ rm /tmp/tmp.AazPVIddae /tmp/tmp.p6zNatILsh ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mrHv0GIrPk +++ mktemp ++ local LAST_ERR=/tmp/tmp.ot8EE7amPe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mrHv0GIrPk ++ cat /tmp/tmp.ot8EE7amPe ++ rm /tmp/tmp.mrHv0GIrPk /tmp/tmp.ot8EE7amPe ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.T4gPLU4VQr +++ mktemp ++ local LAST_ERR=/tmp/tmp.pzWqLSuCef ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.T4gPLU4VQr ++ cat /tmp/tmp.pzWqLSuCef ++ rm /tmp/tmp.T4gPLU4VQr /tmp/tmp.pzWqLSuCef ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4SfhqRXHpe +++ mktemp ++ local LAST_ERR=/tmp/tmp.NbvgsnfynR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4SfhqRXHpe ++ cat /tmp/tmp.NbvgsnfynR ++ rm /tmp/tmp.4SfhqRXHpe /tmp/tmp.NbvgsnfynR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JJi6flzgNB +++ mktemp ++ local LAST_ERR=/tmp/tmp.fDjUNgigvT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JJi6flzgNB ++ cat /tmp/tmp.fDjUNgigvT ++ rm /tmp/tmp.JJi6flzgNB /tmp/tmp.fDjUNgigvT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RWJNXO9f0x +++ mktemp ++ local LAST_ERR=/tmp/tmp.hqQjhePBvo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RWJNXO9f0x ++ cat /tmp/tmp.hqQjhePBvo ++ rm /tmp/tmp.RWJNXO9f0x /tmp/tmp.hqQjhePBvo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zyidRE6Hac +++ mktemp ++ local LAST_ERR=/tmp/tmp.mYQ9qGCpWQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zyidRE6Hac ++ cat /tmp/tmp.mYQ9qGCpWQ ++ rm /tmp/tmp.zyidRE6Hac /tmp/tmp.mYQ9qGCpWQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PBqqwCEDbb +++ mktemp ++ local LAST_ERR=/tmp/tmp.swy69Rt0vX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PBqqwCEDbb ++ cat /tmp/tmp.swy69Rt0vX ++ rm /tmp/tmp.PBqqwCEDbb /tmp/tmp.swy69Rt0vX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wH4IT206FE +++ mktemp ++ local LAST_ERR=/tmp/tmp.xRvpiRXU3l ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wH4IT206FE ++ cat /tmp/tmp.xRvpiRXU3l ++ rm /tmp/tmp.wH4IT206FE /tmp/tmp.xRvpiRXU3l ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.d6gXWfFljI +++ mktemp ++ local LAST_ERR=/tmp/tmp.FmV1LCxYn9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.d6gXWfFljI ++ cat /tmp/tmp.FmV1LCxYn9 ++ rm /tmp/tmp.d6gXWfFljI /tmp/tmp.FmV1LCxYn9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
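[editor's note] Each consistency pass also has to work out which proxy to count: get_proxy_engine probes .spec.haproxy.enabled first and falls back to ProxySQL, which is why every pass in this log reads both flags before checking .status.proxysql.ready. A sketch of that branch, reconstructed from the trace (the real helpers also emit the proxy service name, some-name-proxysql here):

    get_proxy_engine() {
        local cluster_name=$1
        if [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.spec.haproxy.enabled}') == "true" ]]; then
            echo haproxy
        elif [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.spec.proxysql.enabled}') == "true" ]]; then
            echo proxysql
        fi
    }

    get_proxy_engine some-name   # prints proxysql for this cluster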
.+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qG0I9Zcjrx +++ mktemp ++ local LAST_ERR=/tmp/tmp.VmAsf2nlXT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qG0I9Zcjrx ++ cat /tmp/tmp.VmAsf2nlXT ++ rm /tmp/tmp.qG0I9Zcjrx /tmp/tmp.VmAsf2nlXT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.THLCKxuqMJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.Z9QCaZnNUF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.THLCKxuqMJ ++ cat /tmp/tmp.Z9QCaZnNUF ++ rm /tmp/tmp.THLCKxuqMJ /tmp/tmp.Z9QCaZnNUF ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MRh8UY6RYy +++ mktemp ++ local LAST_ERR=/tmp/tmp.mTIPgOl69B ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MRh8UY6RYy ++ cat /tmp/tmp.mTIPgOl69B ++ rm /tmp/tmp.MRh8UY6RYy /tmp/tmp.mTIPgOl69B ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.57uLajSZHY ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.02954fUKAX +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.57uLajSZHY +++++ cat /tmp/tmp.02954fUKAX +++++ rm /tmp/tmp.57uLajSZHY /tmp/tmp.02954fUKAX +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.qHzqx7rBnK ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.n6r94JqigS +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.qHzqx7rBnK +++++ cat /tmp/tmp.n6r94JqigS +++++ rm /tmp/tmp.qHzqx7rBnK /tmp/tmp.n6r94JqigS +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uh4Q4pdgO5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.NbI2Yv2FHB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uh4Q4pdgO5 ++ cat /tmp/tmp.NbI2Yv2FHB ++ rm /tmp/tmp.uh4Q4pdgO5 /tmp/tmp.NbI2Yv2FHB ++ return 0 + [[ 2 == \2 ]] + echo + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator 
----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.N3HKLjkN6Z ++ mktemp + local LAST_ERR=/tmp/tmp.LTtWXYq0SX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.N3HKLjkN6Z secret/my-cluster-secrets-2 patched + cat /tmp/tmp.LTtWXYq0SX + rm /tmp/tmp.N3HKLjkN6Z /tmp/tmp.LTtWXYq0SX + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TjQA6iOs3U +++ mktemp ++ local LAST_ERR=/tmp/tmp.ffIT5fuDDw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TjQA6iOs3U ++ cat /tmp/tmp.ffIT5fuDDw ++ rm /tmp/tmp.TjQA6iOs3U /tmp/tmp.ffIT5fuDDw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HQrVohqixG +++ mktemp ++ local LAST_ERR=/tmp/tmp.ePL56wSk51 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HQrVohqixG ++ cat /tmp/tmp.ePL56wSk51 ++ rm /tmp/tmp.HQrVohqixG /tmp/tmp.ePL56wSk51 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.389xNpszwn +++ mktemp ++ local LAST_ERR=/tmp/tmp.qPjeaeumQ2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.389xNpszwn ++ cat /tmp/tmp.qPjeaeumQ2 ++ rm /tmp/tmp.389xNpszwn /tmp/tmp.qPjeaeumQ2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
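[editor's note] The 'test new operator' step above rotates the operator system user by writing the base64-encoded value into my-cluster-secrets-2; despite the newpassencrypted variable name in the trace, base64 is only an encoding, not encryption. The equivalent standalone commands:

    newpass=test-password2
    newpass_b64=$(echo -n "$newpass" | base64)          # dGVzdC1wYXNzd29yZDI=
    kubectl patch secret my-cluster-secrets-2 \
        -p="{\"data\":{\"operator\": \"$newpass_b64\"}}"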
.+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ycZPlU71iI +++ mktemp ++ local LAST_ERR=/tmp/tmp.1RzXtVoyYn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ycZPlU71iI ++ cat /tmp/tmp.1RzXtVoyYn ++ rm /tmp/tmp.ycZPlU71iI /tmp/tmp.1RzXtVoyYn ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.M9co3wgZ07 +++ mktemp ++ local LAST_ERR=/tmp/tmp.5SaAbUfmng ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.M9co3wgZ07 ++ cat /tmp/tmp.5SaAbUfmng ++ rm /tmp/tmp.M9co3wgZ07 /tmp/tmp.5SaAbUfmng ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Rs3SE9IHff ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.MiBiidYFWH +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Rs3SE9IHff +++++ cat /tmp/tmp.MiBiidYFWH +++++ rm /tmp/tmp.Rs3SE9IHff /tmp/tmp.MiBiidYFWH +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.GFVDYIwVW9 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Bif0mk2bZo +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.GFVDYIwVW9 +++++ cat /tmp/tmp.Bif0mk2bZo +++++ rm /tmp/tmp.GFVDYIwVW9 /tmp/tmp.Bif0mk2bZo +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5r9HJ3reKW +++ mktemp ++ local LAST_ERR=/tmp/tmp.inRkEMqtGX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5r9HJ3reKW ++ cat /tmp/tmp.inRkEMqtGX ++ rm /tmp/tmp.5r9HJ3reKW /tmp/tmp.inRkEMqtGX ++ return 0 + [[ 2 == \2 ]] + echo + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql ]] + 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.k90soaJkZt +++ mktemp ++ local LAST_ERR=/tmp/tmp.uxWMntMJ7O ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.k90soaJkZt ++ cat /tmp/tmp.uxWMntMJ7O ++ rm /tmp/tmp.k90soaJkZt /tmp/tmp.uxWMntMJ7O ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.pBNSeZJ1Hi/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.pBNSeZJ1Hi/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.i0AyyORjy8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.sTwBxYHUNp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.i0AyyORjy8 ++ cat /tmp/tmp.sTwBxYHUNp ++ rm /tmp/tmp.i0AyyORjy8 /tmp/tmp.sTwBxYHUNp ++ return 0 + newpass='iK*=%}-*jy8IEBFP' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''iK*=%}-*jy8IEBFP'\'';' '-h some-name-pxc -uroot -p'\''iK*=%}-*jy8IEBFP'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''iK*=%}-*jy8IEBFP'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''iK*=%}-*jy8IEBFP'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.O359BC7QIg +++ mktemp ++ local LAST_ERR=/tmp/tmp.1ZSDFfNjWn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.O359BC7QIg ++ cat /tmp/tmp.1ZSDFfNjWn ++ rm /tmp/tmp.O359BC7QIg /tmp/tmp.1ZSDFfNjWn ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition 
met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''iK*=%}-*jy8IEBFP'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''iK*=%}-*jy8IEBFP'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''iK*=%}-*jy8IEBFP'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''iK*=%}-*jy8IEBFP'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MA0kRzePb4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.nwrujttwRb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MA0kRzePb4 ++ cat /tmp/tmp.nwrujttwRb ++ rm /tmp/tmp.MA0kRzePb4 /tmp/tmp.nwrujttwRb ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.pBNSeZJ1Hi/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.pBNSeZJ1Hi/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.vFGtA69Nl9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.sOxCMnpWFA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vFGtA69Nl9 ++ cat /tmp/tmp.sOxCMnpWFA ++ rm /tmp/tmp.vFGtA69Nl9 /tmp/tmp.sOxCMnpWFA ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.c18xmC7to5 ++ mktemp + local LAST_ERR=/tmp/tmp.fhQwgYuhCy + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.c18xmC7to5 secret/my-cluster-secrets-2 configured + cat /tmp/tmp.fhQwgYuhCy Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
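[editor's note] Both checks above lean on getSecretData, which pulls one key out of a Secret with a go-template and base64-decodes it: first to fetch the rotated root password from my-cluster-secrets-2 (used to create the testsync user straight in MySQL and confirm it gets synced to ProxySQL), then to read internal-some-name, the operator-managed copy of the credentials it has actually applied. The apply warning just above is expected, since the secret was not originally created with kubectl apply and the annotation is patched in automatically. A sketch of the helper as it appears in the trace:

    getSecretData() {
        local secretName=$1 dataKey=$2
        kubectl get "secrets/$secretName" "--template={{.data.$dataKey}}" | base64 --decode
    }

    root_pass=$(getSecretData my-cluster-secrets-2 root)
    op_pass=$(getSecretData internal-some-name operator)   # expect: test-password2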
+ rm /tmp/tmp.c18xmC7to5 /tmp/tmp.fhQwgYuhCy + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mhOxwle7S0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.rCTVM8jIgS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mhOxwle7S0 ++ cat /tmp/tmp.rCTVM8jIgS ++ rm /tmp/tmp.mhOxwle7S0 /tmp/tmp.rCTVM8jIgS ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.pBNSeZJ1Hi/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.pBNSeZJ1Hi/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.Vhv5vwB1nR + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2154-0538614f#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-9970~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' ++ mktemp + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + local LAST_ERR=/tmp/tmp.BVWsynVFMg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Vhv5vwB1nR perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.BVWsynVFMg + rm /tmp/tmp.Vhv5vwB1nR /tmp/tmp.BVWsynVFMg + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.irljZwLkIj +++ mktemp ++ local LAST_ERR=/tmp/tmp.1NRAg7f950 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.irljZwLkIj ++ cat /tmp/tmp.1NRAg7f950 ++ rm /tmp/tmp.irljZwLkIj /tmp/tmp.1NRAg7f950 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
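[editor's note] apply_config above does not apply the reference manifest as-is: cat_config pipes e2e-tests/users/conf/some-name.yml through a chain of sed substitutions that pin every image to the build under test before handing the result to kubectl apply -f -. A condensed sketch with a few of the substitutions copied from the trace (paths shortened to repo-relative form); the full pipeline also rewrites the backup, logcollector, pmm, haproxy and proxysql images and the minio-service namespace:

    cat e2e-tests/users/conf/some-name.yml \
        | sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
        | sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' \
        | sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2154-0538614f#' \
        | sed -e 's#apply:.*#apply: Never#' \
        | kubectl apply -f -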
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fGfy8meRTE +++ mktemp ++ local LAST_ERR=/tmp/tmp.4UXIAplKvB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fGfy8meRTE ++ cat /tmp/tmp.4UXIAplKvB ++ rm /tmp/tmp.fGfy8meRTE /tmp/tmp.4UXIAplKvB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.l2IonXVHyV +++ mktemp ++ local LAST_ERR=/tmp/tmp.qqCZBXrvWm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.l2IonXVHyV ++ cat /tmp/tmp.qqCZBXrvWm ++ rm /tmp/tmp.l2IonXVHyV /tmp/tmp.qqCZBXrvWm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Qe0VCifckW +++ mktemp ++ local LAST_ERR=/tmp/tmp.sydH57TFU1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Qe0VCifckW ++ cat /tmp/tmp.sydH57TFU1 ++ rm /tmp/tmp.Qe0VCifckW /tmp/tmp.sydH57TFU1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZOKSL1QtsN +++ mktemp ++ local LAST_ERR=/tmp/tmp.OSxv0rR87c ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZOKSL1QtsN ++ cat /tmp/tmp.OSxv0rR87c ++ rm /tmp/tmp.ZOKSL1QtsN /tmp/tmp.OSxv0rR87c ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iezHGd8BWL +++ mktemp ++ local LAST_ERR=/tmp/tmp.ncyRBLNrt3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iezHGd8BWL ++ cat /tmp/tmp.ncyRBLNrt3 ++ rm /tmp/tmp.iezHGd8BWL /tmp/tmp.ncyRBLNrt3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.syW8NJizLp +++ mktemp ++ local LAST_ERR=/tmp/tmp.O4fMjZasJh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.syW8NJizLp ++ cat /tmp/tmp.O4fMjZasJh ++ rm /tmp/tmp.syW8NJizLp /tmp/tmp.O4fMjZasJh ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 53 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XBftB3sOZi +++ mktemp ++ local LAST_ERR=/tmp/tmp.afroOX9yPd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XBftB3sOZi ++ cat /tmp/tmp.afroOX9yPd ++ rm /tmp/tmp.XBftB3sOZi /tmp/tmp.afroOX9yPd ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HEjZMnkuPE +++ mktemp ++ local LAST_ERR=/tmp/tmp.XZg7vhPBFa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HEjZMnkuPE ++ cat /tmp/tmp.XZg7vhPBFa ++ rm /tmp/tmp.HEjZMnkuPE /tmp/tmp.XZg7vhPBFa ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.zKE0fGLPKe ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.hNOqgNcpaj +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.zKE0fGLPKe +++++ cat /tmp/tmp.hNOqgNcpaj +++++ rm /tmp/tmp.zKE0fGLPKe /tmp/tmp.hNOqgNcpaj +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XMebU6M5j6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.WxvmJ65ayM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XMebU6M5j6 ++ cat /tmp/tmp.WxvmJ65ayM ++ rm /tmp/tmp.XMebU6M5j6 /tmp/tmp.WxvmJ65ayM ++ return 0 + [[ 3 == \3 ]] + echo + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2TTT82n43F +++ mktemp ++ local LAST_ERR=/tmp/tmp.dkaAEKikC8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2TTT82n43F ++ cat /tmp/tmp.dkaAEKikC8 ++ rm /tmp/tmp.2TTT82n43F /tmp/tmp.dkaAEKikC8 ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ZlN0hxKaoK ++ mktemp + local LAST_ERR=/tmp/tmp.QSABPFXkjr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 
+ set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ZlN0hxKaoK secret/my-cluster-secrets patched + cat /tmp/tmp.QSABPFXkjr + rm /tmp/tmp.ZlN0hxKaoK /tmp/tmp.QSABPFXkjr + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SBDsbk1zN5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.UMqyxu0HXq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SBDsbk1zN5 ++ cat /tmp/tmp.UMqyxu0HXq ++ rm /tmp/tmp.SBDsbk1zN5 /tmp/tmp.UMqyxu0HXq ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fhNFEPGWCb +++ mktemp ++ local LAST_ERR=/tmp/tmp.l10FdGS5QE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fhNFEPGWCb ++ cat /tmp/tmp.l10FdGS5QE ++ rm /tmp/tmp.fhNFEPGWCb /tmp/tmp.l10FdGS5QE ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.JqbHq0Kq74 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.GnTeFJAm6v +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.JqbHq0Kq74 +++++ cat /tmp/tmp.GnTeFJAm6v +++++ rm /tmp/tmp.JqbHq0Kq74 /tmp/tmp.GnTeFJAm6v +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dBdA7h1HCf +++ mktemp ++ local LAST_ERR=/tmp/tmp.zGOM9iCgZu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dBdA7h1HCf ++ cat /tmp/tmp.zGOM9iCgZu ++ rm /tmp/tmp.dBdA7h1HCf /tmp/tmp.zGOM9iCgZu ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-3-80.sql ]] + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qdZDOuwbxZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.UJhxmeKW13 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qdZDOuwbxZ ++ cat /tmp/tmp.UJhxmeKW13 ++ rm /tmp/tmp.qdZDOuwbxZ /tmp/tmp.UJhxmeKW13 ++ return 0 + client_pod=pxc-client-59944c5bbf-vtvr5 + wait_pod pxc-client-59944c5bbf-vtvr5 + local pod=pxc-client-59944c5bbf-vtvr5 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-vtvr5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-vtvr5 condition met waiting for pod/pxc-client-59944c5bbf-vtvr5 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.pBNSeZJ1Hi/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2154/e2e-tests/users/compare/select-3.sql /tmp/tmp.pBNSeZJ1Hi/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EeROmdepn1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.tZNJvm6UV4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EeROmdepn1 ++ cat /tmp/tmp.tZNJvm6UV4 ++ rm /tmp/tmp.EeROmdepn1 /tmp/tmp.tZNJvm6UV4 ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + destroy users-9970 + local namespace=users-9970 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' + grep -v 'get backup status: Job.batch' + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + sort -u + tee /tmp/tmp.pBNSeZJ1Hi/operator.log ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.7ZFuIL10Y4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.K9Vyd7aO4M ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ 
'[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7ZFuIL10Y4 ++ cat /tmp/tmp.K9Vyd7aO4M ++ rm /tmp/tmp.7ZFuIL10Y4 /tmp/tmp.K9Vyd7aO4M ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-c799c8d46-lhr6j ++ mktemp + local LAST_OUT=/tmp/tmp.eM5k4ahoPg ++ mktemp + local LAST_ERR=/tmp/tmp.yTLizzI6LO + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-c799c8d46-lhr6j + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.eM5k4ahoPg + cat /tmp/tmp.yTLizzI6LO + rm /tmp/tmp.eM5k4ahoPg /tmp/tmp.yTLizzI6LO + return 0 } }, }, { }, }, { }, }, ""), }, { }, }, }, - }, - { - }, - { - }, - }, + }, - "0a1677cb7a7d5ba35a7870d1ca71aa35f529b587d1ba660b3af1660069d445", + "0c34de823eccb625864964457e22b7e6c94e5008b0b1a03fc951e7dfc5bbdcf", "1", - "10c34de823eccb625864964457e22b7e6c94e5008b0b1a03fc951e7dfc5bbdcf", ... // 16 identical fields ... // 16 identical fields 2025-11-07T00:37:23.001Z INFO setup Manager starting up {"gitCommit": "0538614fbed20de0f0d7c794f79f89e8a46b8543", "gitBranch": "PR-2154-0538614f", "buildTime": "2025-11-06T22:22:40Z", "goVersion": "go1.25.4", "os": "linux", "arch": "amd64"} 2025-11-07T00:37:23.001Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.13-gke.1139000"} 2025-11-07T00:37:23.004Z INFO setup Registering Components. 2025-11-07T00:37:23.367Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-11-07T00:37:23.368Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-11-07T00:37:23.368Z INFO controller-runtime.metrics Starting metrics server 2025-11-07T00:37:23.368Z INFO controller-runtime.webhook Starting webhook server 2025-11-07T00:37:23.368Z INFO setup Starting the Cmd. 2025-11-07T00:37:23.368Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-11-07T00:37:23.369Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-11-07T00:37:23.369Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-11-07T00:37:23.369Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-11-07T00:37:23.470Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 
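From this point the output is the operator log captured by the destroy step. A rough sketch of how that capture works, using the selector, namespace, and a shortened version of the filter chain seen in the trace above (the temporary-file handling of kubectl_bin is left out, and the output path is illustrative):

  op_pod=$(kubectl -n pxc-operator get pods \
      --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator \
      -o jsonpath='{.items[].metadata.name}')
  kubectl -n pxc-operator logs "$op_pod" \
      | grep -v level=info \
      | grep -v 'the object has been modified' \
      | grep -v 'get backup status: Job.batch' \
      | sort -u \
      | tee /tmp/operator.log
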
2025-11-07T00:37:23.499Z DEBUG events percona-xtradb-cluster-operator-c799c8d46-lhr6j_c36ffde7-d9d8-41a2-9ee3-51425d1de29f became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"105d14a3-a468-4543-bbba-5e8893da0cc0","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1762475843493263009"}, "reason": "LeaderElection"} 2025-11-07T00:37:23.499Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-11-07T00:37:23.499Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.Secret"} 2025-11-07T00:37:23.499Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-11-07T00:37:23.499Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2025-11-07T00:37:23.500Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-11-07T00:37:23.600Z INFO Starting Controller {"controller": "pxc-controller"} 2025-11-07T00:37:23.600Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2025-11-07T00:37:23.600Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2025-11-07T00:37:23.600Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2025-11-07T00:37:23.601Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2025-11-07T00:37:23.601Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2025-11-07T00:37:57.562Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "586beee7-0a55-4aea-b86b-6e38d28314e9", "version": "1.19.0"} 2025-11-07T00:37:57.822Z INFO User secrets updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "586beee7-0a55-4aea-b86b-6e38d28314e9", "secrets": "my-cluster-secrets"} 2025-11-07T00:37:58.041Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "586beee7-0a55-4aea-b86b-6e38d28314e9", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-11-07T00:37:58.059Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "586beee7-0a55-4aea-b86b-6e38d28314e9", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-11-07T00:37:58.626Z ERROR Reconciler error {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "586beee7-0a55-4aea-b86b-6e38d28314e9", "error": "failed to deploy proxysql: updatePod for proxysql: reconcile config: reconcile autotune config: create or update configmap: configmaps \"auto-some-name-pxc\" already exists", "errorVerbose": "configmaps \"auto-some-name-pxc\" already exists\ncreate or update 
configmap\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileAutotuneConfigMap\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:92\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:25\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nreconcile autotune 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:27\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nreconcile 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:54\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nupdatePod for proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:578\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nfailed to deploy 
proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:596\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-07T00:37:58.731Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "b2c9a112-b95f-4882-8b0e-6c6cc1102d6b", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-07T00:37:58.772Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "b2c9a112-b95f-4882-8b0e-6c6cc1102d6b", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-07T00:37:58.830Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "b2c9a112-b95f-4882-8b0e-6c6cc1102d6b", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-07T00:37:58.875Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "b2c9a112-b95f-4882-8b0e-6c6cc1102d6b", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-07T00:37:58.935Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "b2c9a112-b95f-4882-8b0e-6c6cc1102d6b", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-07T00:37:59.044Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "b2c9a112-b95f-4882-8b0e-6c6cc1102d6b", "object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-07T00:38:00.353Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "8110261c-ac52-4448-87c7-5a36656c4114", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-11-07T00:38:00.440Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "8110261c-ac52-4448-87c7-5a36656c4114", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-11-07T00:39:11.597Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-9970", "name": 
"some-name", "reconcileID": "4869dd26-1c96-4f6d-aba1-326ecf339e98", "user": "operator"} 2025-11-07T00:39:11.637Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4869dd26-1c96-4f6d-aba1-326ecf339e98", "user": "monitor"} 2025-11-07T00:39:11.682Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4869dd26-1c96-4f6d-aba1-326ecf339e98"} 2025-11-07T00:39:11.714Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4869dd26-1c96-4f6d-aba1-326ecf339e98"} 2025-11-07T00:39:11.757Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4869dd26-1c96-4f6d-aba1-326ecf339e98", "user": "xtrabackup"} 2025-11-07T00:39:11.796Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4869dd26-1c96-4f6d-aba1-326ecf339e98"} 2025-11-07T00:39:11.830Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4869dd26-1c96-4f6d-aba1-326ecf339e98", "user": "replication"} 2025-11-07T00:39:11.838Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4869dd26-1c96-4f6d-aba1-326ecf339e98", "err": "get primary pxc pod: not found"} 2025-11-07T00:39:16.544Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "b40f3015-a038-4205-979f-a889e0c47771", "err": "get primary pxc pod: not found"} 2025-11-07T00:39:21.686Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "3b78850f-2fdc-463e-8128-cdd56cd2ac95", "err": "get primary pxc pod: not found"} 2025-11-07T00:39:26.837Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "9d529edf-6708-4dca-a358-552a946253d7", "err": "get primary pxc pod: not found"} 2025-11-07T00:41:38.433Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "2e4f44e4-bc37-4462-bc91-754e840d4ad5", "user": "root"} 2025-11-07T00:41:38.555Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "2e4f44e4-bc37-4462-bc91-754e840d4ad5", "new version": "8.0.43-34.1"} 2025-11-07T00:41:40.262Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "2e4f44e4-bc37-4462-bc91-754e840d4ad5"} 2025-11-07T00:41:45.575Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "62bb9075-084a-4dd5-9929-afed1fc9a1ed"} 2025-11-07T00:41:50.168Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "08eb0e1b-6ff3-4368-9e75-8f60bbaeb758"} 2025-11-07T00:41:55.763Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "7c941236-8d65-45d4-b8d1-4903e40c6ef3"} 
2025-11-07T00:42:01.759Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "ea2d1b9c-ebf6-4ce8-bd9a-e859d0b5c6c0"} 2025-11-07T00:42:07.267Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "9af777e3-a1b9-435e-ad9b-084b39e1883e"} 2025-11-07T00:42:12.564Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "f13f7b39-4de4-4b2d-8d01-4e2877e43ebb"} 2025-11-07T00:42:17.600Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "5dd32b41-bc34-4d0a-8c47-0f6098d618b2"} 2025-11-07T00:42:23.303Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "0f39d77a-fe2f-4626-8c0b-e375bc1ecc51"} 2025-11-07T00:42:28.457Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "9b380f70-2f26-4103-abc0-33d39efbae7e"} 2025-11-07T00:42:33.788Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "37096b34-51b3-4edc-9448-e92465592e3c"} 2025-11-07T00:42:39.259Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "19025980-3915-410f-a250-feb3077f2aa0"} 2025-11-07T00:42:44.715Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "85c968fd-13e9-4124-a48b-5796785e4d70"} 2025-11-07T00:42:49.961Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "6c139405-752c-4614-bb94-1293dcabc832"} 2025-11-07T00:42:55.263Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "bcf94f66-5cd3-4c32-b930-e959be0ad85d"} 2025-11-07T00:43:00.895Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "f89128d2-4e49-4c08-9e69-79d072e18630"} 2025-11-07T00:43:05.957Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "8b28f3d7-f0c2-4926-94be-a23584327c52"} 2025-11-07T00:43:09.758Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "c45bf97c-1448-4d42-a9bf-f9d5ba8d1d2e", "user": "root"} 2025-11-07T00:43:09.778Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "c45bf97c-1448-4d42-a9bf-f9d5ba8d1d2e", "user": "root"} 2025-11-07T00:43:09.796Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "c45bf97c-1448-4d42-a9bf-f9d5ba8d1d2e", "secret": "some-name-mysql-init", "user": "root"} 2025-11-07T00:43:13.850Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "c45bf97c-1448-4d42-a9bf-f9d5ba8d1d2e"} 2025-11-07T00:43:13.869Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9970", "name": 
"some-name", "reconcileID": "c45bf97c-1448-4d42-a9bf-f9d5ba8d1d2e", "user": "root"} 2025-11-07T00:43:13.880Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "00868266-40ec-4c38-8568-17042ba09315"} 2025-11-07T00:43:13.888Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "c45bf97c-1448-4d42-a9bf-f9d5ba8d1d2e", "user": "root"} 2025-11-07T00:43:15.770Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "c45bf97c-1448-4d42-a9bf-f9d5ba8d1d2e"} 2025-11-07T00:43:21.873Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "c2984de1-4249-46ba-a7c9-bf7296dd3601"} 2025-11-07T00:43:27.257Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "93f1988e-28be-43c5-94a6-c9620a96a11f"} 2025-11-07T00:43:28.939Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "8b49ee35-9a77-4a81-854d-d70d12784fa5", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-07T00:43:28.987Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "8b49ee35-9a77-4a81-854d-d70d12784fa5", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-07T00:43:31.860Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "8b49ee35-9a77-4a81-854d-d70d12784fa5", "error": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-07T00:43:56.752Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "ae91f73c-2c2b-4820-bd5f-1cc784cf55eb", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The 
cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-07T00:43:57.873Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "78f70d1f-7caf-4acb-ac6a-b34332f9ae41", "err": "get primary pxc pod: not found"} 2025-11-07T00:44:00.806Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "b796289b-523d-441b-924c-9cb28abcd12a", "user": "proxyadmin"} 2025-11-07T00:44:00.806Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "b796289b-523d-441b-924c-9cb28abcd12a", "user": "proxyadmin"} 2025-11-07T00:44:00.837Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "b796289b-523d-441b-924c-9cb28abcd12a", "user": "proxyadmin"} 2025-11-07T00:44:00.875Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "b796289b-523d-441b-924c-9cb28abcd12a", "user": "proxyadmin"} 2025-11-07T00:44:00.875Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "b796289b-523d-441b-924c-9cb28abcd12a", "last-applied-secret": "be6be941b2c6717cb4b410ff5a375f762add324cdb867c43bbdbfd49a7ce7a9b"} 2025-11-07T00:44:00.879Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "b796289b-523d-441b-924c-9cb28abcd12a", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-07T00:44:02.019Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "78f70d1f-7caf-4acb-ac6a-b34332f9ae41", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-07T00:45:02.664Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "8e00c2aa-3cb0-422c-8619-8ac380cd4300"} 2025-11-07T00:45:06.903Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "005f5b7b-aa83-4179-99b7-8e3bfa3f0d54"} 2025-11-07T00:45:09.307Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "6c298a0f-1908-4acb-9398-d5f184d064ea", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-07T00:45:09.361Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "6c298a0f-1908-4acb-9398-d5f184d064ea", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-07T00:45:11.415Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "6a6ccadc-2c3b-4d80-9f8a-b6b00b0626a9", "user": "xtrabackup"} 2025-11-07T00:45:11.427Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "6a6ccadc-2c3b-4d80-9f8a-b6b00b0626a9", "user": "xtrabackup"} 2025-11-07T00:45:11.450Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "6a6ccadc-2c3b-4d80-9f8a-b6b00b0626a9", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-11-07T00:45:11.468Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "6a6ccadc-2c3b-4d80-9f8a-b6b00b0626a9", "user": "xtrabackup"} 2025-11-07T00:45:11.481Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "6a6ccadc-2c3b-4d80-9f8a-b6b00b0626a9", "user": "xtrabackup"} 2025-11-07T00:45:11.489Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "6a6ccadc-2c3b-4d80-9f8a-b6b00b0626a9", "last-applied-secret": 
"6e0a1677cb7a7d5ba35a7870d1ca71aa35f529b587d1ba660b3af1660069d445"} 2025-11-07T00:45:11.492Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "6a6ccadc-2c3b-4d80-9f8a-b6b00b0626a9", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-07T00:45:11.507Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "6c298a0f-1908-4acb-9398-d5f184d064ea"} 2025-11-07T00:46:04.435Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "afa58f42-3ef3-48ff-8bc5-60cd7c133c89", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-9970 on 34.118.224.10:53: no such host"} 2025-11-07T00:46:09.447Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "20706e85-6dc9-4cfd-b2e7-b76d4506f07a", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-9970 on 34.118.224.10:53: no such host"} 2025-11-07T00:46:14.771Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "426578e1-82cb-41f0-bc4d-d97f3b37eadb", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.139.32.70:33062: connect: connection refused"} 2025-11-07T00:47:01.748Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "43149b04-e0b0-478a-9a5c-9d4c4dda7c2e", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.139.34.53:33062: connect: connection refused"} 2025-11-07T00:47:06.917Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "62c7cc40-9f4d-4b9d-80dd-99538bf8f55e", "primary name": "some-name-pxc-0.some-name-pxc.users-9970.svc.cluster.local"} 2025-11-07T00:47:12.045Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a2cbeed5-a89c-47c9-aab4-08c007a7ec9f", "primary name": "some-name-pxc-0.some-name-pxc.users-9970.svc.cluster.local"} 2025-11-07T00:47:17.188Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "f41576db-5732-42ed-a9dd-06bc598363ca", "primary name": "some-name-pxc-0.some-name-pxc.users-9970.svc.cluster.local"} 2025-11-07T00:47:22.340Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "20752671-9725-4017-a554-08e198fb9610", "primary name": "some-name-pxc-0.some-name-pxc.users-9970.svc.cluster.local"} 2025-11-07T00:47:27.490Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "339fce2d-d56d-47bb-815a-53a50444665a", "primary name": "some-name-pxc-0.some-name-pxc.users-9970.svc.cluster.local"} 2025-11-07T00:47:32.658Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "62f3ba09-e7de-40ca-9dc1-b12e85e17bc1", "primary name": "some-name-pxc-0.some-name-pxc.users-9970.svc.cluster.local"} 2025-11-07T00:47:40.530Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "9f07fe7e-a7b1-4a36-9b60-ee87d05dd833"} 2025-11-07T00:47:45.684Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a222f128-0333-4495-aa47-89cdf137a955", "user": "monitor"} 2025-11-07T00:47:45.698Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a222f128-0333-4495-aa47-89cdf137a955", "user": "monitor"} 2025-11-07T00:47:45.715Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a222f128-0333-4495-aa47-89cdf137a955", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-07T00:47:45.718Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "fa2a24cd-5822-4fb7-b7b7-4c8ddb32ae70"} 2025-11-07T00:47:45.734Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a222f128-0333-4495-aa47-89cdf137a955", "user": "monitor"} 2025-11-07T00:47:45.753Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a222f128-0333-4495-aa47-89cdf137a955", "user": "monitor"} 2025-11-07T00:47:46.053Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a222f128-0333-4495-aa47-89cdf137a955", "last-applied-secret": "70c96a5b0f1665c130c9c8d10f2d6a7d8a187f1537f6a17a22e85cb6a038851c"} 2025-11-07T00:47:46.057Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a222f128-0333-4495-aa47-89cdf137a955", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-07T00:47:48.647Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a222f128-0333-4495-aa47-89cdf137a955", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / 
\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-07T00:48:27.840Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "bbb595e6-25b2-47c8-86d3-a4d555a5bdf1", "user": "monitor"} 2025-11-07T00:48:29.855Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "bbb595e6-25b2-47c8-86d3-a4d555a5bdf1"} 2025-11-07T00:48:32.841Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "54f5ad48-c6eb-4f6c-a331-0e0f84223404", "user": "monitor"} 2025-11-07T00:48:34.735Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "54f5ad48-c6eb-4f6c-a331-0e0f84223404"} 2025-11-07T00:48:38.459Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "e4cfad08-347f-42ff-ae55-7142d65c0559", "user": "monitor"} 2025-11-07T00:48:40.354Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "e4cfad08-347f-42ff-ae55-7142d65c0559"} 2025-11-07T00:48:44.062Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "95a5086b-f92b-4e6f-8cc6-3317888243e0", "user": "monitor"} 2025-11-07T00:48:46.536Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "95a5086b-f92b-4e6f-8cc6-3317888243e0"} 2025-11-07T00:48:50.246Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "3a475901-dbde-4442-a6e0-3c161d2dcc11", "user": "monitor"} 2025-11-07T00:48:52.130Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "3a475901-dbde-4442-a6e0-3c161d2dcc11"} 2025-11-07T00:48:55.844Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "e78969bb-12c0-4164-9f48-82507e57bad7", "user": "monitor"} 2025-11-07T00:48:57.953Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "e78969bb-12c0-4164-9f48-82507e57bad7"} 2025-11-07T00:49:01.398Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "cd1d6d54-19e5-4379-8d75-3e073357b756", "user": "monitor"} 2025-11-07T00:49:02.282Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "cd1d6d54-19e5-4379-8d75-3e073357b756", "user": "monitor"} 2025-11-07T00:49:02.299Z INFO Proxy 
pods will be restarted {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "cd1d6d54-19e5-4379-8d75-3e073357b756", "last-applied-secret": "70c96a5b0f1665c130c9c8d10f2d6a7d8a187f1537f6a17a22e85cb6a038851c"} 2025-11-07T00:49:03.870Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "cd1d6d54-19e5-4379-8d75-3e073357b756"} 2025-11-07T00:49:09.470Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "0d5b43b1-899f-4083-a04a-8e303a133127"} 2025-11-07T00:49:14.965Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "b4e30874-5998-42a6-a118-5835139a723b"} 2025-11-07T00:49:20.060Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "94cac4ed-e544-4c8b-b8bc-04f796b8ebc6"} 2025-11-07T00:49:25.276Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "495cb522-3e1f-4ccc-b501-269cd7723c1e"} 2025-11-07T00:49:29.269Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "f30c97de-0ab1-4e11-babe-5a82ca4bd58a", "user": "operator"} 2025-11-07T00:49:29.283Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "f30c97de-0ab1-4e11-babe-5a82ca4bd58a", "user": "operator"} 2025-11-07T00:49:29.518Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "f30c97de-0ab1-4e11-babe-5a82ca4bd58a", "secret": "some-name-mysql-init", "user": "operator"} 2025-11-07T00:49:29.702Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "f30c97de-0ab1-4e11-babe-5a82ca4bd58a", "user": "operator"} 2025-11-07T00:49:29.722Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "f30c97de-0ab1-4e11-babe-5a82ca4bd58a", "user": "operator"} 2025-11-07T00:49:29.742Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "f30c97de-0ab1-4e11-babe-5a82ca4bd58a", "last-applied-secret": "f3dc0115b74edb0865cf42803c51472e27b9cc0f4e157c315d9f3917895f7f11"} 2025-11-07T00:49:29.746Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "f30c97de-0ab1-4e11-babe-5a82ca4bd58a", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-07T00:49:31.653Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "03fc3c97-9442-45e4-8ba1-b06bd4301fc5", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / 
\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-07T00:50:05.851Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "c6eaaab7-feef-49fe-b482-4f5d6bb47cb4"} 2025-11-07T00:50:10.600Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "2ecb085a-0587-4fd5-b248-4f0de39846b3"} 2025-11-07T00:50:16.257Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "c6eca0c1-349e-49fe-b6ef-91c81a2451ed"} 2025-11-07T00:50:21.754Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "f1cc6b15-57ac-4f69-aaae-bc312cf17d1e"} 2025-11-07T00:50:22.661Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "secrets": "my-cluster-secrets-2"} 2025-11-07T00:50:22.667Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "user": "root"} 2025-11-07T00:50:22.688Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "user": "root"} 2025-11-07T00:50:22.709Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "secret": "some-name-mysql-init", "user": "root"} 2025-11-07T00:50:25.074Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1"} 2025-11-07T00:50:25.095Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "user": "root"} 2025-11-07T00:50:25.119Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "user": "root"} 2025-11-07T00:50:25.128Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "user": "operator"} 2025-11-07T00:50:25.143Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "user": "operator"} 2025-11-07T00:50:25.165Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "secret": "some-name-mysql-init", "user": "operator"} 2025-11-07T00:50:25.182Z INFO Internal secrets updated {"controller": 
"pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "user": "operator"} 2025-11-07T00:50:25.198Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "user": "operator"} 2025-11-07T00:50:25.207Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "user": "monitor"} 2025-11-07T00:50:25.220Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "user": "monitor"} 2025-11-07T00:50:25.251Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-07T00:50:25.270Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "user": "monitor"} 2025-11-07T00:50:25.290Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "user": "monitor"} 2025-11-07T00:50:25.582Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "user": "xtrabackup"} 2025-11-07T00:50:25.596Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "user": "xtrabackup"} 2025-11-07T00:50:25.614Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-11-07T00:50:25.633Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "user": "xtrabackup"} 2025-11-07T00:50:25.647Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "user": "xtrabackup"} 2025-11-07T00:50:25.656Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "user": "replication"} 2025-11-07T00:50:25.668Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "user": "replication"} 2025-11-07T00:50:25.688Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "secret": "some-name-mysql-init", "user": "replication"} 2025-11-07T00:50:25.723Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "user": "replication"} 2025-11-07T00:50:25.738Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", 
"reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "user": "replication"} 2025-11-07T00:50:25.738Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "user": "proxyadmin"} 2025-11-07T00:50:25.757Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "user": "proxyadmin"} 2025-11-07T00:50:25.775Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "user": "proxyadmin"} 2025-11-07T00:50:25.775Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "last-applied-secret": "d6f16eb036c183c7389e22a6264bf6852a48df17360b31f6afde437ef21a6c9a"} 2025-11-07T00:50:25.775Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "last-applied-secret": "d6f16eb036c183c7389e22a6264bf6852a48df17360b31f6afde437ef21a6c9a"} 2025-11-07T00:50:25.778Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-07T00:50:25.828Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-07T00:50:27.675Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "21683362-95fa-400b-bd8d-390a877030a1", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-07T00:51:19.086Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "8a9a7cce-9d59-4bab-b993-60f209222b06", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-9970 on 34.118.224.10:53: no such host"} 2025-11-07T00:51:24.389Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "483c70ed-216a-4f87-b23a-00617fa1edc7", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-9970 on 34.118.224.10:53: no such host"} 2025-11-07T00:51:30.268Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "d6931590-ba67-4e20-a15a-257a40e16d10", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-9970 on 34.118.224.10:53: no such host"} 2025-11-07T00:52:17.239Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "aa9f626d-910f-4cf3-b8c0-770f27c927bf", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.139.34.56:33062: connect: connection 
refused"} 2025-11-07T00:52:27.615Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "805d5f6a-4bb2-432b-ad14-33ceccc92e54", "primary name": "some-name-pxc-0.some-name-pxc.users-9970.svc.cluster.local"} 2025-11-07T00:52:32.756Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "1a0b737f-522f-46bb-b4eb-7d4e3011f74a", "primary name": "some-name-pxc-0.some-name-pxc.users-9970.svc.cluster.local"} 2025-11-07T00:52:43.072Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "721e24ea-dc4d-4aeb-8e7a-5164b30bacbd", "primary name": "some-name-pxc-0.some-name-pxc.users-9970.svc.cluster.local"} 2025-11-07T00:52:48.225Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "9285d129-aa5d-4936-a5e0-20b661e6681e", "primary name": "some-name-pxc-0.some-name-pxc.users-9970.svc.cluster.local"} 2025-11-07T00:53:00.421Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "121a5937-a55a-4119-a17d-685088684025", "user": "monitor"} 2025-11-07T00:53:01.418Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "121a5937-a55a-4119-a17d-685088684025", "user": "monitor"} 2025-11-07T00:53:01.437Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "121a5937-a55a-4119-a17d-685088684025", "last-applied-secret": "d6f16eb036c183c7389e22a6264bf6852a48df17360b31f6afde437ef21a6c9a"} 2025-11-07T00:53:03.265Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "121a5937-a55a-4119-a17d-685088684025"} 2025-11-07T00:53:03.672Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "8850adc6-58fe-48ae-87e6-ccf0ac48c2ca", "user": "operator"} 2025-11-07T00:53:03.687Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "8850adc6-58fe-48ae-87e6-ccf0ac48c2ca", "user": "operator"} 2025-11-07T00:53:03.703Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "8850adc6-58fe-48ae-87e6-ccf0ac48c2ca", "secret": "some-name-mysql-init", "user": "operator"} 2025-11-07T00:53:03.735Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "8850adc6-58fe-48ae-87e6-ccf0ac48c2ca", "user": "operator"} 2025-11-07T00:53:03.752Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "8850adc6-58fe-48ae-87e6-ccf0ac48c2ca", "user": "operator"} 2025-11-07T00:53:03.772Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "8850adc6-58fe-48ae-87e6-ccf0ac48c2ca", "last-applied-secret": 
"10c34de823eccb625864964457e22b7e6c94e5008b0b1a03fc951e7dfc5bbdcf"} 2025-11-07T00:53:03.778Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "8850adc6-58fe-48ae-87e6-ccf0ac48c2ca", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-07T00:53:07.473Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "8850adc6-58fe-48ae-87e6-ccf0ac48c2ca", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 
'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9970.svc.c' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-07T00:53:45.198Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "49f5ee71-b31a-4429-ae54-b9cf7319dee2"} 2025-11-07T00:53:49.192Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "0f36d534-8c49-47e0-a063-fd68e58dafb5"} 2025-11-07T00:53:54.800Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "c232655e-5323-41c7-afdf-8f7dd1e666f6"} 2025-11-07T00:54:01.008Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "6fbea456-d2ef-4b49-90fe-8911e817f0e4"} 2025-11-07T00:54:05.399Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": 
"users-9970", "name": "some-name", "reconcileID": "67d4e8c8-eafe-4be1-8d6e-d98a8ab11cd7"} 2025-11-07T00:54:10.675Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "ffb40a71-72f1-41ef-ae37-8575b5b63982"} 2025-11-07T00:54:16.545Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "aa88ca82-81b5-4456-87a4-365f3e74456e"} 2025-11-07T00:54:21.207Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "ea850059-80a9-48a0-a5ca-67d83682f1fa"} 2025-11-07T00:54:26.770Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "ea5238f3-2e31-4b1f-ab47-531c05ff18f7"} 2025-11-07T00:54:32.209Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "ea698b49-5b50-4934-9cf1-0109be0098a2"} 2025-11-07T00:54:37.477Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "0954b310-5b5a-4b19-9a23-a3e471c81b15"} 2025-11-07T00:54:42.605Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "ecfc220c-c1f4-487b-b39e-87a131f684c8"} 2025-11-07T00:54:48.007Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "8d5ae0c4-7e22-4c5d-927e-c850753b00ee"} 2025-11-07T00:54:53.486Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "211b82e3-25a7-47f6-a83d-4df8ffbbdd1c"} 2025-11-07T00:54:58.890Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "9dc7e005-4271-4d7f-9a47-f5af7de214b6"} 2025-11-07T00:55:00.347Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4a4a28d7-abf5-46fc-85b0-e1b3d3a91c80", "user": "root"} 2025-11-07T00:55:00.366Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4a4a28d7-abf5-46fc-85b0-e1b3d3a91c80", "user": "root"} 2025-11-07T00:55:00.390Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4a4a28d7-abf5-46fc-85b0-e1b3d3a91c80", "secret": "some-name-mysql-init", "user": "root"} 2025-11-07T00:55:03.125Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4a4a28d7-abf5-46fc-85b0-e1b3d3a91c80"} 2025-11-07T00:55:03.146Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4a4a28d7-abf5-46fc-85b0-e1b3d3a91c80", "user": "root"} 2025-11-07T00:55:03.167Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4a4a28d7-abf5-46fc-85b0-e1b3d3a91c80", "user": "root"} 2025-11-07T00:55:03.185Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4a4a28d7-abf5-46fc-85b0-e1b3d3a91c80", "user": "monitor"} 
2025-11-07T00:55:03.199Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4a4a28d7-abf5-46fc-85b0-e1b3d3a91c80", "user": "monitor"} 2025-11-07T00:55:03.222Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4a4a28d7-abf5-46fc-85b0-e1b3d3a91c80", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-07T00:55:03.242Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4a4a28d7-abf5-46fc-85b0-e1b3d3a91c80", "user": "monitor"} 2025-11-07T00:55:03.264Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4a4a28d7-abf5-46fc-85b0-e1b3d3a91c80", "user": "monitor"} 2025-11-07T00:55:03.534Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4a4a28d7-abf5-46fc-85b0-e1b3d3a91c80", "user": "xtrabackup"} 2025-11-07T00:55:03.549Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4a4a28d7-abf5-46fc-85b0-e1b3d3a91c80", "user": "xtrabackup"} 2025-11-07T00:55:03.566Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4a4a28d7-abf5-46fc-85b0-e1b3d3a91c80", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-11-07T00:55:03.585Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4a4a28d7-abf5-46fc-85b0-e1b3d3a91c80", "user": "xtrabackup"} 2025-11-07T00:55:03.601Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4a4a28d7-abf5-46fc-85b0-e1b3d3a91c80", "user": "xtrabackup"} 2025-11-07T00:55:03.607Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4a4a28d7-abf5-46fc-85b0-e1b3d3a91c80", "user": "proxyadmin"} 2025-11-07T00:55:03.628Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4a4a28d7-abf5-46fc-85b0-e1b3d3a91c80", "user": "proxyadmin"} 2025-11-07T00:55:03.649Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4a4a28d7-abf5-46fc-85b0-e1b3d3a91c80", "user": "proxyadmin"} 2025-11-07T00:55:03.649Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4a4a28d7-abf5-46fc-85b0-e1b3d3a91c80", "last-applied-secret": "27e0365668efbc24093b5d059f0490fccf40f2d834ce546b37328d9418c84f64"} 2025-11-07T00:55:03.649Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4a4a28d7-abf5-46fc-85b0-e1b3d3a91c80", "last-applied-secret": "27e0365668efbc24093b5d059f0490fccf40f2d834ce546b37328d9418c84f64"} 2025-11-07T00:55:03.652Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4a4a28d7-abf5-46fc-85b0-e1b3d3a91c80", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-07T00:55:03.707Z DEBUG Updating object {"controller": 
"pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4a4a28d7-abf5-46fc-85b0-e1b3d3a91c80", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-07T00:55:05.467Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "4a4a28d7-abf5-46fc-85b0-e1b3d3a91c80", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-07T00:55:21.260Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "d517e047-0c8c-4aa6-9cfe-1951ecac0d4e", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-07T00:55:21.305Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "d517e047-0c8c-4aa6-9cfe-1951ecac0d4e", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-07T00:55:21.362Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "d517e047-0c8c-4aa6-9cfe-1951ecac0d4e", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-07T00:55:21.438Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "d517e047-0c8c-4aa6-9cfe-1951ecac0d4e", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-07T00:55:21.523Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "d517e047-0c8c-4aa6-9cfe-1951ecac0d4e", "object": "some-name-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-07T00:55:22.320Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "725bf1de-eff3-494c-92b0-cffea999d803", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-11-07T00:57:48.062Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "3843b079-b04d-48a3-85fa-f1b1d4a3e30d", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp 10.139.34.60:33062: i/o timeout"} 2025-11-07T00:58:14.831Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "user": "root"} 2025-11-07T00:58:14.851Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "user": "root"} 2025-11-07T00:58:14.913Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "secret": "some-name-mysql-init", "user": "root"} 2025-11-07T00:58:14.972Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "user": "root"} 2025-11-07T00:58:14.990Z INFO Old password discarded 
{"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "user": "root"} 2025-11-07T00:58:14.997Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "user": "operator"} 2025-11-07T00:58:15.011Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "user": "operator"} 2025-11-07T00:58:15.042Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "secret": "some-name-mysql-init", "user": "operator"} 2025-11-07T00:58:15.072Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "user": "operator"} 2025-11-07T00:58:15.089Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "user": "operator"} 2025-11-07T00:58:15.097Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "user": "monitor"} 2025-11-07T00:58:15.111Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "user": "monitor"} 2025-11-07T00:58:15.142Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-07T00:58:15.175Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "user": "monitor"} 2025-11-07T00:58:15.498Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "user": "xtrabackup"} 2025-11-07T00:58:15.512Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "user": "xtrabackup"} 2025-11-07T00:58:15.532Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-11-07T00:58:15.554Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "user": "xtrabackup"} 2025-11-07T00:58:15.569Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "user": "xtrabackup"} 2025-11-07T00:58:15.576Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "user": "replication"} 2025-11-07T00:58:15.588Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9970", "name": 
"some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "user": "replication"} 2025-11-07T00:58:15.607Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "secret": "some-name-mysql-init", "user": "replication"} 2025-11-07T00:58:15.628Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "user": "replication"} 2025-11-07T00:58:15.640Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "user": "replication"} 2025-11-07T00:58:15.640Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "last-applied-secret": "f3dc0115b74edb0865cf42803c51472e27b9cc0f4e157c315d9f3917895f7f11"} 2025-11-07T00:58:15.645Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "a1e3365e-30c8-4a15-a4ad-c638512a43e1", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-07T00:59:17.054Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "2bb5dfff-01a5-4593-802e-083efde17bda", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: invalid connection"} 2025-11-07T01:00:46.463Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "3b57e87c-091f-4b7a-93d7-b9badf0beaef", "user": "monitor"} 2025-11-07T01:00:47.290Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "3b57e87c-091f-4b7a-93d7-b9badf0beaef", "user": "monitor"} 2025-11-07T01:00:49.430Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "147bdafa-14fc-48c0-b8e5-234b523ea06d", "user": "monitor"} 2025-11-07T01:00:49.441Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "147bdafa-14fc-48c0-b8e5-234b523ea06d", "user": "monitor"} 2025-11-07T01:00:49.460Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "147bdafa-14fc-48c0-b8e5-234b523ea06d", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-07T01:00:49.479Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "147bdafa-14fc-48c0-b8e5-234b523ea06d", "user": "monitor"} 2025-11-07T01:00:51.473Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "aa9c5681-de31-4b3a-ae7f-150762c167e9", "user": "monitor"} 2025-11-07T01:00:57.083Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "795c508f-75b2-4e1e-bc08-8ae7199047a4", "user": "monitor"} 2025-11-07T01:01:02.525Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": 
"users-9970", "name": "some-name", "reconcileID": "a2e4e8d8-5407-4d4b-9959-b6f33b9b3b70", "user": "monitor"} 2025-11-07T01:01:08.182Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "faf2e0d2-f6b2-400d-bc7c-27feb4e27605", "user": "monitor"} 2025-11-07T01:01:14.052Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-9970", "name": "some-name", "reconcileID": "7aa65e89-6924-4ff8-b9ba-fe2cd1378f87", "user": "monitor"} ... // 22 identical fields - "27e0365668efbc24093b5d059f0490fccf40f2d834ce546b37328d9418c84f64", + "27e0365668efbc24093b5d059f0490fccf40f2d834ce546b37328d9418c84f64", ... // 2 identical fields ... // 2 identical fields ... // 2 identical fields ... // 3 identical elements ... // 3 identical fields ... // 3 identical fields ... // 3 identical fields ... // 4 identical fields ... // 5 identical fields ... // 5 identical fields ... // 5 identical fields "6", - "6eb036c183c7389e22a6264bf6852a48df17360b31f6afde437ef21a6c9a", ... // 6 identical fields ... // 6 identical fields - "70c96a5b0f1665c130c9c8d10f2d6a7d8a187f1537f6a17a22e85cb6a038851c", + "70c96a5b0f1665c130c9c8d10f2d6a7d8a187f1537f6a17a22e85cb6a038851c", ... // 7 identical fields ... // 8 identical fields ... // 9 identical fields ... // 9 identical fields AccessModes: nil, ActiveDeadlineSeconds: nil, Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}}, Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}}, Annotations: map[string]string{ - Annotations: map[string]string{ + Annotations: map[string]string{ + APIVersion: "", - APIVersion: "apps/v1", - APIVersion: "apps/v1", - APIVersion: "v1", Args: {"mysqld"}, Args: {"proxysql", "-f", "-c", "/etc/proxysql/proxysql.cnf", ...}, - Args: []string{"logrotate"}, AutomountServiceAccountToken: nil, + AvailableReplicas: 0, - AvailableReplicas: 2, - AvailableReplicas: 3, AWSElasticBlockStore: nil, AzureFile: nil, + "b036c183c7389e22a6264bf6852a48df17360b31f6afde437ef21a6c9a", - "be6be941b2c6717cb4b410ff5a375f762add324cdb867c43bbdbfd49a7ce7a9b", Capacity: nil, - CollisionCount: &0, + CollisionCount: nil, Conditions: nil, ConfigMapKeyRef: nil, ConfigMap: &v1.ConfigMapVolumeSource{ ContainerPort: 3306, ContainerPort: 33060, ContainerPort: 33062, ContainerPort: 4444, ContainerPort: 4567, ContainerPort: 4568, ContainerPort: 6032, ContainerPort: 6070, Containers: []v1.Container{ + CreationTimestamp: v1.Time{}, - CreationTimestamp: v1.Time{Time: s"2025-11-07 00:37:58 +0000 UTC"}, + CurrentReplicas: 0, - CurrentReplicas: 2, - CurrentReplicas: 3, + CurrentRevision: "", - CurrentRevision: "some-name-proxysql-5444799b8c", - CurrentRevision: "some-name-proxysql-5845475988", - CurrentRevision: "some-name-proxysql-59c656c4f7", - CurrentRevision: "some-name-proxysql-665bcfc6b7", - CurrentRevision: "some-name-proxysql-845cf8c4d7", - CurrentRevision: 
"some-name-proxysql-86cdfbb8c9", - CurrentRevision: "some-name-pxc-5fd9c4577b", - CurrentRevision: "some-name-pxc-6548ffcb75", - CurrentRevision: "some-name-pxc-6cdfcfbd7d", - CurrentRevision: "some-name-pxc-875657bd8", + "d", - "d6f", - "d6f16eb036c183c7389e22a6264bf6852a48df17360b31f6afde437ef21a6c9a", + "d6f16eb036c183c7389e22a6264bf6852a48df17360b31f6afde437ef21a6c9a", DataSource: nil, DataSourceRef: nil, - DefaultMode: &420, - DefaultMode: &420, + DefaultMode: nil, + DefaultMode: nil, DeletionGracePeriodSeconds: nil, DeletionGracePeriodSeconds: nil, DeletionTimestamp: nil, + DeprecatedServiceAccount: "", - DeprecatedServiceAccount: "default", + DNSPolicy: "", - DNSPolicy: "ClusterFirst", "e", EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-proxysql"}, Optional: &true}}}, EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-pxc"}, Optional: &true}}}, EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-log-collector"}, Optional: &true}}}, - EnvFrom: []v1.EnvFromSource{{SecretRef: s"&SecretEnvSource{LocalObjectRefe"...}}, Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "OPERATOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "proxyadmin"}}}, ...}, Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, ...}, Env: []v1.EnvVar{ - Env: []v1.EnvVar{ EphemeralContainers: nil, + "f16", - "f3dc0115b74edb0865cf42803c51472e27b9cc0f4e157c315d9f3917895f7f11", + "f3dc0115b74edb0865cf42803c51472e27b9cc0f4e157c315d9f3917895f7f11", FailureThreshold: 3, FC: nil, FieldPath: "metadata.name", FieldPath: "metadata.namespace", FieldRef: &v1.ObjectFieldSelector{ - FieldsType: "FieldsV1", - FieldsType: "FieldsV1", - FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`..., - FieldsV1: s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`..., Finalizers: nil, Finalizers: nil, + Generation: 0, - Generation: 1, - Generation: 2, - Generation: 3, - Generation: 4, - Generation: 5, - Generation: 6, - Generation: 7, - Generation: 8, github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 GitRepo: nil, /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:474 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:869 HostAliases: nil, HostIP: "", HostPort: 0, - Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", - Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", ImagePullPolicy: "Always", - ImagePullPolicy: "Always", InitContainers: []v1.Container{ InitialDelaySeconds: 300, ISCSI: nil, Items: nil, Items: nil, "kubectl.kubernetes.io/default-container": "proxysql", "kubectl.kubernetes.io/default-container": "pxc", Labels: 
{"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Labels: nil, + "last-applied-secret": "6e0a1677cb7a7d5ba35a7870d1ca71aa35f529b587d1ba660b3af1660069d445", + "last-applied-secret": "be6be941b2c6717cb4b410ff5a375f762add324cdb867c43bbdbfd49a7ce7a9b", "last-applied-secret": strings.Join({ Lifecycle: nil, LivenessProbe: &v1.Probe{ LocalObjectReference: {Name: "auto-some-name-pxc"}, LocalObjectReference: {Name: "some-name-pxc"}, ManagedFields: nil, + ManagedFields: nil, - ManagedFields: []v1.ManagedFieldsEntry{ - Manager: "kube-controller-manager", - Manager: "percona-xtradb-cluster-operator", MinReadySeconds: 0, [mysql] 2025/11/07 00:57:32 packets.go:58 unexpected EOF [mysql] 2025/11/07 00:59:17 packets.go:58 read tcp 10.139.32.67:56596->10.139.32.80:33062: read: connection reset by peer Name: "auto-config", {Name: "bin", VolumeSource: {EmptyDir: &{}}}, {Name: "CLUSTER_HASH", Value: "2949110"}, Name: "config", Name: "DEFAULT_AUTHENTICATION_PLUGIN", - {Name: "IS_LOGCOLLECTOR", Value: "yes"}, Name: "ist", {Name: "LIVENESS_CHECK_TIMEOUT", Value: "5"}, {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"}, - {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"}, - Name: "logrotate", - Name: "logs", {Name: "MONITOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "monitor"}}}, - {Name: "MONITOR_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, Name: "mysql", Name: "mysql-admin", Name: "mysql-init-file", {Name: "MYSQL_NOTIFY_SOCKET", Value: "/var/lib/mysql/notify.sock"}, {Name: "MYSQL_STATE_FILE", Value: "/var/lib/mysql/mysql.state"}, Name: "mysql-users-secret-file", Name: "mysqlx", {Name: "OPERATOR_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, Name: "POD_NAME", Name: "POD_NAMESPASE", - {Name: "POD_NAMESPASE", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, - {Name: "POD_NAME", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, Name: "proxyadm", {Name: "READINESS_CHECK_TIMEOUT", Value: "15"}, - {Name: "SERVICE_TYPE", Value: "mysql"}, Namespace: "users-9970", Name: "ssl", Name: "ssl-internal", Name: "sst", Name: "stats", {Name: "tmp", VolumeSource: {EmptyDir: &{}}}, Name: "vault-keyring-secret", Name: "write-set", {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, NFS: nil, NodeName: "", NodeSelector: nil, ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "last-applied-secret": "be6be941b2c6717cb4b410ff5a375f762add324cdb867c43bbdbfd49a7ce7a9b", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}}, ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": 
"percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}}, ObjectMeta: {Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "pxc", "last-applied-secret": "27e0365668efbc24093b5d059f0490fccf40f2d834ce546b37328d9418c84f64", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}}, ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, ObjectMeta: {Name: "proxydata", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, ObjectMeta: v1.ObjectMeta{ ObjectMeta: v1.ObjectMeta{ + ObservedGeneration: 0, - ObservedGeneration: 1, - ObservedGeneration: 2, - ObservedGeneration: 3, - ObservedGeneration: 4, - ObservedGeneration: 5, - ObservedGeneration: 6, - ObservedGeneration: 7, - ObservedGeneration: 8, - Operation: "Update", - Operation: "Update", Optional: &false, Optional: &true, Optional: &true, Ordinals: nil, OS: nil, Overhead: nil, OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name", UID: "fac8aadd-6e3f-4bbe-8217-af797cd18b23", ...}}, OwnerReferences: nil, "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMjdlMDM2NTY2OGVmYmMyNDA5M2I1ZDA1OWYwNDkwZmNjZjQwZjJkODM0Y2U1NDZiMzczMjhkOTQxOGM4NGY2NCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMTBjMzRkZTgyM2VjY2I2MjU4NjQ5NjQ0NTdlMjJiN2U2Yzk0ZTUwMDhiMGIxYTAzZmM5NTFlN2RmYzViYmRjZiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMTBjMzRkZTgyM2VjY2I2MjU4NjQ5NjQ0NTdlMjJiN2U2Yzk0ZTUwMDhiMGIxYTAzZmM5NTFlN2RmYzViYmRjZiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNzBjOTZhNWIwZjE2NjVjMTMwYzljOGQxMGYyZDZhN2Q4YTE4N2YxNTM3ZjZhMTdhMjJlODVjYjZhMDM4ODUxYyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNzBjOTZhNWIwZjE2NjVjMTMwYzljOGQxMGYyZDZhN2Q4YTE4N2YxNTM3ZjZhMTdhMjJlODVjYjZhMDM4ODUxYyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYmU2YmU5NDFiMmM2NzE3Y2I0YjQxMGZmNWEzNzVmNzYyYWRkMzI0Y2RiODY3YzQzYmJkYmZkNDlhN2NlN2E5YiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZDZmMTZlYjAzNmMxODNjNzM4OWUyMmE2MjY0YmY2ODUyYTQ4ZGYxNzM2MGIzMWY2YWZkZTQzN2VmMjFhNmM5YSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZDZmMTZlYjAzNmMxODNjNzM4OWUyMmE2MjY0YmY2ODUyYTQ4ZGYxNzM2MGIzMWY2YWZkZTQzN2VmMjFhNmM5YSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZjNkYzAxMTViNzRlZGIwODY1Y2Y0MjgwM2M1MTQ3MmUyN2I5Y2MwZjRlMTU3YzMxNWQ5ZjM5MTc4OTVmN2YxMSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZjNkYzAxMTViNzRlZGIwODY1Y2Y0MjgwM2M1MTQ3MmUyN2I5Y2MwZjRlMTU3YzMxNWQ5ZjM5MTc4OTVmN2YxMSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMjdlMDM2NTY2OGVmYmMyNDA5M2I1ZDA1OWYwNDkwZmNjZjQwZjJkODM0Y2U1NDZiMzczMjhkOTQxOGM4NGY2NCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMjdlMDM2NTY2OGVmYmMyNDA5M2I1ZDA1OWYwNDkwZmNjZjQwZjJkODM0Y2U1NDZiMzczMjhkOTQxOGM4NGY2NCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMjdlMDM2NTY2OGVmYmMyNDA5M2I1ZDA1OWYwNDkwZmNjZjQwZjJkODM0Y2U1NDZiMzczMjhkOTQxOGM4NGY2NCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMTU0LTA1Mzg2MTRmIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJsb2dzIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1sb2djb2xsZWN0b3IiLCJlbnZGcm9tIjpbeyJzZWNyZXRSZWYiOnsibmFtZSI6InNvbWUtbmFtZS1sb2ctY29sbGVjdG9yIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IkxPR19EQVRBX0RJUiIsInZhbHVlIjoiL3Zhci9saWIvbXlzcWwifSx7Im5hbWUiOiJQT0RfTkFNRVNQQVNFIiwidmFsdWVGcm9tIjp7ImZpZWxkUmVmIjp7ImZpZWxkUGF0aCI6Im1ldGFkYXRhLm5hbWVzcGFjZSJ9fX0seyJuYW1lIjoiUE9EX05BTUUiLCJ2YWx1ZUZyb20iOnsiZmllbGRSZWYiOnsiZmllbGRQYXRoIjoibWV0YWRhdGEubmFtZSJ9fX1dLCJyZXNvdXJjZXMiOnt9LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJkYXRhZGlyIiwibW91bnRQYXRoIjoiL3Zhci9saWIvbXlzcWwifV0sImltYWdlUHVsbFBvbGljeSI6IkFsd2F5cyJ9LHsibmFtZSI6ImxvZ3JvdGF0ZSIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOm1haW4tbG9nY29sbGVjdG9yIiwiYXJncyI6WyJsb2dyb3RhdGUiXSwiZW52IjpbeyJuYW1lIjoiU0VSVklDRV9UWVBFIiwidmFsdWUiOiJteXNxbCJ9LHsibmFtZSI6Ik1PTklUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJtb25pdG9yIn19fV0sInJlc291cmNlcyI6e30sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImRhdGFkaXIiLCJtb3VudF
BhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHhjIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1weGM4LjAiLCJjb21tYW5kIjpbIi92YXIvbGliL215c3FsL3B4Yy1lbnRyeXBvaW50LnNoIl0sImFyZ3MiOlsibXlzcWxkIl0sInBvcnRzIjpbeyJuYW1lIjoibXlzcWwiLCJjb250YWluZXJQb3J0IjozMzA2fSx7Im5hbWUiOiJzc3QiLCJjb250YWluZXJQb3J0Ijo0NDQ0fSx7Im5hbWUiOiJ3cml0ZS1zZXQiLCJjb250YWluZXJQb3J0Ijo0NTY3fSx7Im5hbWUiOiJpc3QiLCJjb250YWluZXJQb3J0Ijo0NTY4fSx7Im5hbWUiOiJteXNxbC1hZG1pbiIsImNvbnRhaW5lclBvcnQiOjMzMDYyfSx7Im5h"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMjdlMDM2NTY2OGVmYmMyNDA5M2I1ZDA1OWYwNDkwZmNjZjQwZjJkODM0Y2U1NDZiMzczMjhkOTQxOGM4NGY2NCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMTU0LTA1Mzg2MTRmIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXB4YzguMCIsImNvbW1hbmQiOlsiL3Zhci9saWIvbXlzcWwvcHhjLWVudHJ5cG9pbnQuc2giXSwiYXJncyI6WyJteXNxbGQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InNzdCIsImNvbnRhaW5lclBvcnQiOjQ0NDR9LHsibmFtZSI6IndyaXRlLXNldCIsImNvbnRhaW5lclBvcnQiOjQ1Njd9LHsibmFtZSI6ImlzdCIsImNvbnRhaW5lclBvcnQiOjQ1Njh9LHsibmFtZSI6Im15c3FsLWFkbWluIiwiY29udGFpbmVyUG9ydCI6MzMwNjJ9LHsibmFtZSI6Im15c3FseCIsImNv
bnRhaW5lclBvcnQiOjMzMDYwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXB4YyIsIm9wdGlvbmFsIjp0cnVlfX1dLCJlbnYiOlt7Im5hbWUiOiJQWENfU0VSVklDRSIsInZhbHVlIjoic29tZS1uYW1lLXB4Yy11bnJlYWR5In0seyJuYW1lIjoiTU9OSVRPUl9IT1NUIiwidmFsdWUiOiIlIn0seyJuYW1lIjoiTVlTUUxfUk9PVF9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6InJvb3QifX19LHsibmFtZSI6IlhUUkFCQUNLVVBfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJ4dHJhYmFja3VwIn19fSx7Im5hbWUiOiJNT05JVE9SX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoibW9uaXRvciJ9fX0seyJuYW1lIjoiQ0xVU1RFUl9IQVNIIiwidmFsdWUiOiIyOTQ5MTEwIn0seyJuYW1lIjoiT1BFUkFUT1JfQURNSU5fUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiTElWRU5FU1NfQ0hFQ0tfVElNRU9VVCIsInZhbHVlIjoiNSJ9LHsibmFtZSI6IlJFQURJTkVTU19DSEVDS19USU1FT1VUIiwidmFsdWUiOiIxNSJ9LHsibmFtZSI6IkRFRkFVTFRfQVVUSEVOVElDQVRJT05fUExVR0lOIiwidmFsdWUiOiJjYWNoaW5nX3NoYTJfcGFzc3dvcmQifSx7Im5hbWUiOiJNWVNR"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNmUwYTE2NzdjYjdhN2Q1YmEzNWE3ODcwZDFjYTcxYWEzNWY1MjliNTg3ZDFiYTY2MGIzYWYxNjYwMDY5ZDQ0NSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNmUwYTE2NzdjYjdhN2Q1YmEzNWE3ODcwZDFjYTcxYWEzNWY1MjliNTg3ZDFiYTY2MGIzYWYxNjYwMDY5ZDQ0NSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZDZmMTZlYjAzNmMxODNjNzM4OWUyMmE2MjY0YmY2ODUyYTQ4ZGYxNzM2MGIzMWY2YWZkZTQzN2VmMjFhNmM5YSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZDZmMTZlYjAzNmMxODNjNzM4OWUyMmE2MjY0YmY2ODUyYTQ4ZGYxNzM2MGIzMWY2YWZkZTQzN2VmMjFhNmM5YSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZjNkYzAxMTViNzRlZGIwODY1Y2Y0MjgwM2M1MTQ3MmUyN2I5Y2MwZjRlMTU3YzMxNWQ5ZjM5MTc4OTVmN2YxMSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNv"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYmU2YmU5NDFiMmM2NzE3Y2I0YjQxMGZmNWEzNzVmNzYyYWRkMzI0Y2RiODY3YzQzYmJkYmZkNDlhN2NlN2E5YiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMw"..., "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc", + PeriodSeconds: 0, - PeriodSeconds: 10, + PersistentVolumeClaimRetentionPolicy: nil, - PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", + Phase: "", - Phase: "Pending", + PodManagementPolicy: "", - PodManagementPolicy: "OrderedReady", Ports: nil, Ports: []v1.ContainerPort{ PreemptionPolicy: nil, ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}}, + Protocol: "", - Protocol: "TCP", Quobyte: nil, ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...}, + ReadyReplicas: 0, - ReadyReplicas: 2, - ReadyReplicas: 3, + Replicas: 0, Replicas: &2, - Replicas: 2, - Replicas: &2, + Replicas: &2, Replicas: &3, - Replicas: 3, - Replicas: &3, + Replicas: &3, ResizePolicy: nil, ResourceFieldRef: nil, Resources: {}, Resources: {Limits: {s"cpu": {i: {...}, s: "1", Format: "DecimalSI"}, s"memory": {i: {...}, s: "2G", Format: "DecimalSI"}}, Requests: {s"cpu": {i: {...}, s: "100m", Format: "DecimalSI"}, s"memory": {i: {...}, Format: "DecimalSI"}}}, + ResourceVersion: "", - ResourceVersion: "1762475915072591006", - ResourceVersion: "1762476095733999007", - ResourceVersion: "1762476228331087006", - ResourceVersion: "1762476291715103006", - ResourceVersion: "1762476310566463006", - ResourceVersion: "1762476457042719007", - ResourceVersion: "1762476506080335006", - ResourceVersion: "1762476585310063006", - ResourceVersion: "1762476648395151006", - ResourceVersion: "1762476774469935007", - ResourceVersion: "1762476889451775006", - ResourceVersion: "1762476919707231007", - ResourceVersion: "1762477091227647007", + RestartPolicy: "", - RestartPolicy: "Always", - RevisionHistoryLimit: &10, + RevisionHistoryLimit: nil, + SchedulerName: "", - SchedulerName: "default-scheduler", SecretName: "internal-some-name", SecretName: "some-name-mysql-init", SecretName: "some-name-ssl", SecretName: "some-name-ssl-internal", SecretName: "some-name-vault", Secret: &v1.SecretVolumeSource{ SecurityContext: nil, Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": 
"percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, SelfLink: "", ServiceAccountName: "default", ServiceName: "some-name-proxysql-unready", ServiceName: "some-name-pxc", SetHostnameAsFQDN: nil, sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1 Spec: v1.PersistentVolumeClaimSpec{ Spec: v1.PodSpec{ Spec: v1.StatefulSetSpec{ StartupProbe: nil, Status: v1.PersistentVolumeClaimStatus{ Status: v1.StatefulSetStatus{ StorageClassName: nil, Subdomain: "", - Subresource: "status", SuccessThreshold: 1, Template: v1.PodTemplateSpec{ TerminationGracePeriodSeconds: &30, TerminationGracePeriodSeconds: &600, TerminationGracePeriodSeconds: nil, + TerminationMessagePath: "", - TerminationMessagePath: "/dev/termination-log", + TerminationMessagePolicy: "", - TerminationMessagePolicy: "File", TimeoutSeconds: 5, - Time: s"2025-11-07 00:37:58 +0000 UTC", - Time: s"2025-11-07 00:38:35 +0000 UTC", - Time: s"2025-11-07 00:41:35 +0000 UTC", - Time: s"2025-11-07 00:43:28 +0000 UTC", - Time: s"2025-11-07 00:43:48 +0000 UTC", - Time: s"2025-11-07 00:44:00 +0000 UTC", - Time: s"2025-11-07 00:44:51 +0000 UTC", - Time: s"2025-11-07 00:45:09 +0000 UTC", - Time: s"2025-11-07 00:45:10 +0000 UTC", - Time: s"2025-11-07 00:45:11 +0000 UTC", - Time: s"2025-11-07 00:47:37 +0000 UTC", - Time: s"2025-11-07 00:47:46 +0000 UTC", - Time: s"2025-11-07 00:48:26 +0000 UTC", - Time: s"2025-11-07 00:49:30 +0000 UTC", - Time: s"2025-11-07 00:49:45 +0000 UTC", - Time: s"2025-11-07 00:50:25 +0000 UTC", - Time: s"2025-11-07 00:50:48 +0000 UTC", - Time: s"2025-11-07 00:52:54 +0000 UTC", - Time: s"2025-11-07 00:53:03 +0000 UTC", - Time: s"2025-11-07 00:54:49 +0000 UTC", - Time: s"2025-11-07 00:55:03 +0000 UTC", - Time: s"2025-11-07 00:55:19 +0000 UTC", - Time: s"2025-11-07 00:55:21 +0000 UTC", - Time: s"2025-11-07 00:58:11 +0000 UTC", Tolerations: nil, - TopologySpreadConstraints: nil, + TopologySpreadConstraints: []v1.TopologySpreadConstraint{}, TypeMeta: {}, TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"}, + UID: "", - UID: "2c953636-90a2-406a-8614-e4dda3588e83", - UID: "39cb73d9-210d-466b-888d-d3b697419702", + UpdatedReplicas: 0, - UpdatedReplicas: 1, - UpdatedReplicas: 2, - UpdatedReplicas: 3, + UpdateRevision: "", - UpdateRevision: "some-name-proxysql-5444799b8c", - UpdateRevision: "some-name-proxysql-5845475988", - UpdateRevision: "some-name-proxysql-59c656c4f7", - UpdateRevision: "some-name-proxysql-665bcfc6b7", - UpdateRevision: "some-name-proxysql-845cf8c4d7", - UpdateRevision: "some-name-proxysql-86cdfbb8c9", - UpdateRevision: "some-name-pxc-5785fff7d7", - UpdateRevision: "some-name-pxc-5fd9c4577b", - UpdateRevision: "some-name-pxc-6548ffcb75", - UpdateRevision: "some-name-pxc-6cdfcfbd7d", - UpdateRevision: "some-name-pxc-875657bd8", UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}}, &v1.StatefulSet{ Value: "", + Value: "caching_sha2_password", ValueFrom: nil, ValueFrom: &v1.EnvVarSource{ - Value: "mysql_native_password", VolumeAttributesClassName: nil, VolumeClaimTemplates: []v1.PersistentVolumeClaim{ VolumeDevices: 
nil, - VolumeMode: &"Filesystem", + VolumeMode: nil, VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...}, - VolumeMounts: []v1.VolumeMount{{Name: "datadir", MountPath: "/var/lib/mysql"}}, VolumeName: "", VolumeSource: v1.VolumeSource{ Volumes: []v1.Volume{ VsphereVolume: nil, WorkingDir: "", + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE + kubectl patch pxc -n users-9970 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.qgYrfNbE9h ++ mktemp + local LAST_ERR=/tmp/tmp.80N68qczIT + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qgYrfNbE9h perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-9970 namespace + cat /tmp/tmp.80N68qczIT + rm /tmp/tmp.qgYrfNbE9h /tmp/tmp.80N68qczIT + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.rsiY9vY0bW ++ mktemp + local LAST_ERR=/tmp/tmp.HoySvmuPBE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rsiY9vY0bW No resources found + cat /tmp/tmp.HoySvmuPBE + rm /tmp/tmp.rsiY9vY0bW /tmp/tmp.HoySvmuPBE + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.XCJrLtDZQZ ++ mktemp + local LAST_ERR=/tmp/tmp.oj6mLUL1sq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XCJrLtDZQZ No resources found + cat /tmp/tmp.oj6mLUL1sq + rm /tmp/tmp.XCJrLtDZQZ /tmp/tmp.oj6mLUL1sq + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.vM4WgcHxJu ++ mktemp + local LAST_ERR=/tmp/tmp.Bl2OMAMIIL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.vM4WgcHxJu validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.Bl2OMAMIIL + rm /tmp/tmp.vM4WgcHxJu /tmp/tmp.Bl2OMAMIIL + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml + : + '[' '!' 
-z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-9970 + rm -rf /tmp/tmp.pBNSeZJ1Hi + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.2SK3tHDsGT + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed + local LAST_OUT=/tmp/tmp.LgXxzsda1I ----------------------------------------------------------------------------------- ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.yPZZ0SSnd4 + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.kkSZEq4TxL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-9970 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator
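Note: the kubectl_bin calls traced throughout this log come from a retry wrapper defined in the e2e-tests helper functions, which is not itself shown in the log. The following is a minimal sketch of what that wrapper appears to do, reconstructed only from the xtrace output above; the function name is taken from the trace, but the back-off behaviour and exact error handling are assumptions and the real helper may differ.

# Sketch (assumption): retry wrapper matching the "+ kubectl ..." / LAST_OUT / LAST_ERR
# pattern seen in the xtrace output. Reconstructed from this log, not copied from the
# operator repository.
kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ "$exit_status" != 0 ]; then
            sleep "$i"        # assumed back-off between attempts; not visible in the trace
            continue
        fi
        break
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR"
    rm -f "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}

# Example usage matching the cleanup steps above:
#   kubectl_bin delete pxc --all --all-namespaces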