Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/logs/users-8-0.log
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
+ create_infra users-8513
+ local ns=users-8513
+ '[' -n pxc-operator ']'
+ kubectl get pxc --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch pxc -n users-2794 some-name --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.x9ErnMfbIw
++ mktemp
+ local LAST_ERR=/tmp/tmp.tQ9mKQT3Ah
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.x9ErnMfbIw
perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-2794 namespace
+ cat /tmp/tmp.tQ9mKQT3Ah
+ rm /tmp/tmp.x9ErnMfbIw /tmp/tmp.tQ9mKQT3Ah
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.8W6OjLaroZ
++ mktemp
+ local LAST_ERR=/tmp/tmp.BEkTYRamzD
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.8W6OjLaroZ
No resources found
+ cat /tmp/tmp.BEkTYRamzD
+ rm /tmp/tmp.8W6OjLaroZ /tmp/tmp.BEkTYRamzD
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.VHzcS3E2Jz
++ mktemp
+ local LAST_ERR=/tmp/tmp.tQUMTRt5pc
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.VHzcS3E2Jz
No resources found
+ cat /tmp/tmp.tQUMTRt5pc
+ rm /tmp/tmp.VHzcS3E2Jz /tmp/tmp.tQUMTRt5pc
+ return 0
+ create_namespace pxc-operator
+ local namespace=pxc-operator
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ sed s/NAMESPACE//
++ awk '-F ' '{print $2}'
++ tail -n1
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep validate-auth
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ awk '{print $1}'
++ grep chaos-mesh
++ kubectl get crd
++ grep chaos-mesh.org
++ awk '{print $1}'
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrole
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ kubectl_bin get ns
+ grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces pxc-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
+ xargs kubectl delete ns
cleaned up old namespaces pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace pxc-operator
++ mktemp
++ mktemp
+ local LAST_OUT=/tmp/tmp.PuXX5KTEsD
+ awk '{print$1}'
+ local LAST_OUT=/tmp/tmp.mQt9SAO6vB
++ mktemp
++ mktemp
+ local LAST_ERR=/tmp/tmp.0MdjgGYmap
+ local exit_status=0
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.z2E8l6hS5M
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.PuXX5KTEsD
+ cat /tmp/tmp.0MdjgGYmap
+ rm /tmp/tmp.PuXX5KTEsD /tmp/tmp.0MdjgGYmap
+ return 0
namespace "users-2794" deleted
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.mQt9SAO6vB
namespace "pxc-operator" deleted
+ cat /tmp/tmp.z2E8l6hS5M
+ rm /tmp/tmp.mQt9SAO6vB /tmp/tmp.z2E8l6hS5M
+ return 0
+ wait_for_delete namespace/pxc-operator
+ local res=namespace/pxc-operator
+ echo -n 'waiting for namespace/pxc-operator to be deleted'
waiting for namespace/pxc-operator to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "pxc-operator" not found
+ desc 'create namespace pxc-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.HJPfeCsiWg
++ mktemp
+ local LAST_ERR=/tmp/tmp.9wJeMm1MVG
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.HJPfeCsiWg
namespace/pxc-operator created
+ cat /tmp/tmp.9wJeMm1MVG
+ rm /tmp/tmp.HJPfeCsiWg /tmp/tmp.9wJeMm1MVG
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.1Li38OxBnk
+++ mktemp
++ local LAST_ERR=/tmp/tmp.0CDShWN66R
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.1Li38OxBnk
++ cat /tmp/tmp.0CDShWN66R
++ rm /tmp/tmp.1Li38OxBnk /tmp/tmp.0CDShWN66R
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2265-30ece6d6-6-cluster5 --namespace=pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.jZoQTDwg0R
++ mktemp
+ local LAST_ERR=/tmp/tmp.O7PhGsftjX
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2265-30ece6d6-6-cluster5 --namespace=pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.jZoQTDwg0R
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2265-30ece6d6-6-cluster5" modified.
+ cat /tmp/tmp.O7PhGsftjX
+ rm /tmp/tmp.jZoQTDwg0R /tmp/tmp.O7PhGsftjX
+ return 0
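Note: the `mktemp` / `seq 0 2` / `cat`/`rm` pattern that repeats throughout this trace is the test framework's retry wrapper around kubectl. A minimal sketch of such a wrapper, reconstructed from what the trace shows (this is a hypothetical reimplementation, not the actual helper from the e2e library):

    # Sketch: retry a kubectl call up to 3 times, capturing stdout/stderr in temp files.
    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            [ "$exit_status" -eq 0 ] && break
            sleep 0
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }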
+ deploy_operator
+ desc 'start PXC operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
start PXC operator
-----------------------------------------------------------------------------------
+ kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/deploy/crd.yaml
++ mktemp
+ local LAST_OUT=/tmp/tmp.Rw2rpxjwEL
++ mktemp
+ local LAST_ERR=/tmp/tmp.t6paAoD3EY
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/deploy/crd.yaml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.Rw2rpxjwEL
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied
+ cat /tmp/tmp.t6paAoD3EY
+ rm /tmp/tmp.Rw2rpxjwEL /tmp/tmp.t6paAoD3EY
+ return 0
+ '[' -n pxc-operator ']'
+ apply_rbac cw-rbac
+ local operator_namespace=pxc-operator
+ local rbac=cw-rbac
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/deploy/cw-rbac.yaml
+ sed -e 's^namespace: .*^namespace: pxc-operator^'
+ kubectl_bin apply -f -
++ mktemp
+ local LAST_OUT=/tmp/tmp.3hUWqpwr7G
++ mktemp
+ local LAST_ERR=/tmp/tmp.RzZdX2M9IQ
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.3hUWqpwr7G
clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged
serviceaccount/percona-xtradb-cluster-operator created
clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged
+ cat /tmp/tmp.RzZdX2M9IQ
+ rm /tmp/tmp.3hUWqpwr7G /tmp/tmp.RzZdX2M9IQ
+ return 0
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/deploy/cw-operator.yaml
+ sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2265-30ece6d6^'
+ sed -e 's^failureThreshold: .*^failureThreshold: 10^'
+ kubectl_bin apply -f -
+ yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' -
++ mktemp
+ yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' -
+ local LAST_OUT=/tmp/tmp.4LRarW1ksi
++ mktemp
+ local LAST_ERR=/tmp/tmp.xxplMZmoAZ
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.4LRarW1ksi
deployment.apps/percona-xtradb-cluster-operator created
service/percona-xtradb-cluster-operator created
+ cat /tmp/tmp.xxplMZmoAZ
+ rm /tmp/tmp.4LRarW1ksi /tmp/tmp.xxplMZmoAZ
+ return 0
+ sleep 10
+ kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s
++ mktemp
+ local LAST_OUT=/tmp/tmp.3KyocG1QBt
++ mktemp
+ local LAST_ERR=/tmp/tmp.BWl10BsgTf
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.3KyocG1QBt
pod/percona-xtradb-cluster-operator-8547dbc67b-4l2tb condition met
+ cat /tmp/tmp.BWl10BsgTf
+ rm /tmp/tmp.3KyocG1QBt /tmp/tmp.BWl10BsgTf
+ return 0
++ get_operator_pod
++ local label_prefix=app.kubernetes.io/
+++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator
+++ grep -c percona-xtradb-cluster-operator
++ local check_label=1
++ [[ 1 -eq 0 ]]
++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
+++ mktemp
++ local LAST_OUT=/tmp/tmp.RAhfctd2qk
+++ mktemp
++ local LAST_ERR=/tmp/tmp.eEXKjZ9X20
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.RAhfctd2qk
++ cat /tmp/tmp.eEXKjZ9X20
++ rm /tmp/tmp.RAhfctd2qk /tmp/tmp.eEXKjZ9X20
++ return 0
+ wait_pod percona-xtradb-cluster-operator-8547dbc67b-4l2tb 480 pxc-operator
+ local pod=percona-xtradb-cluster-operator-8547dbc67b-4l2tb
+ local max_retry=480
+ local ns=pxc-operator
++ echo percona-xtradb-cluster-operator-8547dbc67b-4l2tb
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/percona-xtradb-cluster-operator-8547dbc67b-4l2tb condition met
waiting for pod/percona-xtradb-cluster-operator-8547dbc67b-4l2tb to become Ready.Ok
+ sleep 3
+ create_namespace users-8513
+ local namespace=users-8513
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ tail -n1
++ sed s/NAMESPACE//
++ awk '-F ' '{print $2}'
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep validate-auth
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get crd
++ awk '{print $1}'
++ grep chaos-mesh.org
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrole
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ kubectl_bin get ns
+ grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces users-8513'
+ xargs kubectl delete ns
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces users-8513
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace users-8513
++ mktemp
++ mktemp
+ awk '{print$1}'
+ local LAST_OUT=/tmp/tmp.o28ABy9YUU
++ mktemp
+ local LAST_OUT=/tmp/tmp.wTiMe49tkD
+ local LAST_ERR=/tmp/tmp.nSqaskSVQB
+ local exit_status=0
++ mktemp
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.xoZLUTOFFw
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace users-8513
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace users-8513
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.wTiMe49tkD
+ cat /tmp/tmp.xoZLUTOFFw
+ rm /tmp/tmp.wTiMe49tkD /tmp/tmp.xoZLUTOFFw
+ return 0
error: resource(s) were provided, but no name was specified
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace users-8513
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ cat /tmp/tmp.o28ABy9YUU
+ cat /tmp/tmp.nSqaskSVQB
Error from server (NotFound): namespaces "users-8513" not found
+ rm /tmp/tmp.o28ABy9YUU /tmp/tmp.nSqaskSVQB
+ return 1
+ :
+ wait_for_delete namespace/users-8513
+ local res=namespace/users-8513
+ echo -n 'waiting for namespace/users-8513 to be deleted'
waiting for namespace/users-8513 to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "users-8513" not found
+ desc 'create namespace users-8513'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace users-8513
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace users-8513
++ mktemp
+ local LAST_OUT=/tmp/tmp.iIpEI1IIqC
++ mktemp
+ local LAST_ERR=/tmp/tmp.QkCFRdEHTg
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace users-8513
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.iIpEI1IIqC
namespace/users-8513 created
+ cat /tmp/tmp.QkCFRdEHTg
+ rm /tmp/tmp.iIpEI1IIqC /tmp/tmp.QkCFRdEHTg
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.F9CXSAg4Vm
+++ mktemp
++ local LAST_ERR=/tmp/tmp.zrO5sAHDem
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.F9CXSAg4Vm
++ cat /tmp/tmp.zrO5sAHDem
++ rm /tmp/tmp.F9CXSAg4Vm /tmp/tmp.zrO5sAHDem
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2265-30ece6d6-6-cluster5 --namespace=users-8513
++ mktemp
+ local LAST_OUT=/tmp/tmp.EitFIihIE1
++ mktemp
+ local LAST_ERR=/tmp/tmp.YkLI0uNmSB
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2265-30ece6d6-6-cluster5 --namespace=users-8513
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.EitFIihIE1
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2265-30ece6d6-6-cluster5" modified.
+ cat /tmp/tmp.YkLI0uNmSB
+ rm /tmp/tmp.EitFIihIE1 /tmp/tmp.YkLI0uNmSB
+ return 0
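Note: before each namespace is recreated, `create_infra` strips the finalizers from every PerconaXtraDBCluster so that `kubectl delete pxc --all` cannot hang on finalizer processing. The cleanup step, as reconstructed from the `xargs -L 1 sh -xc` trace at the top of this log:

    # Remove finalizers from all pxc objects across namespaces, then delete them.
    # ($0/$1 receive the NAMESPACE and NAME columns of each output line.)
    kubectl get pxc --all-namespaces -o wide \
        | grep -v NAMESPACE \
        | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
    kubectl delete pxc --all --all-namespaces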
+ apply_secrets
+ desc 'create secrets for cloud storages'
+ set +o xtrace
-----------------------------------------------------------------------------------
create secrets for cloud storages
-----------------------------------------------------------------------------------
+ '[' -z '' ']'
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/conf/cloud-secret.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.M1oi8lWyAP
++ mktemp
+ local LAST_ERR=/tmp/tmp.nkSo8VbVnp
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/conf/cloud-secret.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.M1oi8lWyAP
secret/minio-secret created
secret/aws-s3-secret created
secret/gcp-cs-secret created
secret/azure-secret created
+ cat /tmp/tmp.nkSo8VbVnp
+ rm /tmp/tmp.M1oi8lWyAP /tmp/tmp.nkSo8VbVnp
+ return 0
+ desc 'create PXC cluster with 1-password secret'
+ set +o xtrace
-----------------------------------------------------------------------------------
create PXC cluster with 1-password secret
-----------------------------------------------------------------------------------
+ newpass=test-password
++ echo -n test-password
++ base64
+ newpassencrypted=dGVzdC1wYXNzd29yZA==
+ cluster=some-name
+ spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/conf/some-name.yml '' '' /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/conf/secrets_one_pass.yml
+ local cluster=some-name
+ local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/conf/some-name.yml
+ local size=3
+ local sleep=10
+ local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/conf/secrets_one_pass.yml
+ local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/conf/client.yml
+ local port=3306
+ desc 'create first PXC cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
create first PXC cluster
-----------------------------------------------------------------------------------
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/conf/secrets_one_pass.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.3W9HRzioDY
++ mktemp
+ local LAST_ERR=/tmp/tmp.vJgJVxUHYr
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/conf/secrets_one_pass.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.3W9HRzioDY
secret/my-cluster-secrets created
secret/some-name-ssl created
secret/some-name-ssl-internal created
+ cat /tmp/tmp.vJgJVxUHYr
+ rm /tmp/tmp.3W9HRzioDY /tmp/tmp.vJgJVxUHYr
+ return 0
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/conf/client.yml
+ '[' -z '' ']'
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/conf/client.yml
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/conf/client.yml
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#'
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: percona/proxysql3:3.0#'
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-8513~
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2265-30ece6d6#'
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#'
+ kubectl_bin apply -f -
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
++ mktemp
+ local LAST_OUT=/tmp/tmp.Dy1jwkxngi
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
++ mktemp
+ local LAST_ERR=/tmp/tmp.9k9l1e4qnW
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.Dy1jwkxngi
deployment.apps/pxc-client created
+ cat /tmp/tmp.9k9l1e4qnW
+ rm /tmp/tmp.Dy1jwkxngi /tmp/tmp.9k9l1e4qnW
+ return 0
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/conf/some-name.yml
+ '[' -z '' ']'
+ kubectl_bin apply -f -
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/conf/some-name.yml
++ mktemp
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/conf/some-name.yml
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ local LAST_OUT=/tmp/tmp.tG87GnKxeJ
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: percona/proxysql3:3.0#'
++ mktemp
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#'
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-8513~
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#'
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2265-30ece6d6#'
+ local LAST_ERR=/tmp/tmp.Q8CHOQ9AXP
+ local exit_status=0
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.tG87GnKxeJ
perconaxtradbcluster.pxc.percona.com/some-name created
+ cat /tmp/tmp.Q8CHOQ9AXP
+ rm /tmp/tmp.tG87GnKxeJ /tmp/tmp.Q8CHOQ9AXP
+ return 0
+ desc 'check if all 3 Pods started'
+ set +o xtrace
-----------------------------------------------------------------------------------
check if all 3 Pods started
-----------------------------------------------------------------------------------
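Note: `apply_config` pipes each manifest through `cat_config`, a chain of `sed` substitutions that pins every image field (pxc, proxysql, haproxy, backup, init, pmm, logcollector) to the images under test and fills in the minio namespace placeholder before handing the result to `kubectl apply -f -`. A reduced sketch of the idea, showing only two of the substitutions visible in the trace (this is an abridged reconstruction, not the full helper):

    # Template a test manifest on the fly and apply it (abridged from the traced sed chain).
    cat_config() {
        cat "$1" \
            | sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
            | sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2265-30ece6d6#'
    }
    cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/conf/some-name.yml | kubectl apply -f -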
++ get_proxy some-name
++ local target_cluster=some-name
+++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}'
++++ mktemp
+++ local LAST_OUT=/tmp/tmp.6T3HdwZqbU
++++ mktemp
+++ local LAST_ERR=/tmp/tmp.s4mtCD1HIE
+++ local exit_status=0
++++ seq 0 2
+++ for i in '$(seq 0 2)'
+++ set +e
+++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}'
+++ exit_status=0
+++ set -e
+++ '[' 0 '!=' 0 ']'
+++ break
+++ cat /tmp/tmp.6T3HdwZqbU
+++ cat /tmp/tmp.s4mtCD1HIE
+++ rm /tmp/tmp.6T3HdwZqbU /tmp/tmp.s4mtCD1HIE
+++ return 0
++ [[ '' == \t\r\u\e ]]
+++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}'
++++ mktemp
+++ local LAST_OUT=/tmp/tmp.5DxCy9fwr5
++++ mktemp
+++ local LAST_ERR=/tmp/tmp.4hiY0v2BTL
+++ local exit_status=0
++++ seq 0 2
+++ for i in '$(seq 0 2)'
+++ set +e
+++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}'
+++ exit_status=0
+++ set -e
+++ '[' 0 '!=' 0 ']'
+++ break
+++ cat /tmp/tmp.5DxCy9fwr5
+++ cat /tmp/tmp.4hiY0v2BTL
+++ rm /tmp/tmp.5DxCy9fwr5 /tmp/tmp.4hiY0v2BTL
+++ return 0
++ [[ true == \t\r\u\e ]]
++ echo some-name-proxysql
++ return
+ local proxy=some-name-proxysql
+ kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-8513
++ mktemp
+ local LAST_OUT=/tmp/tmp.rlE7aFe61W
++ mktemp
+ local LAST_ERR=/tmp/tmp.KeINCy5NK4
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-8513
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-8513
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-8513
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ cat /tmp/tmp.rlE7aFe61W
+ cat /tmp/tmp.KeINCy5NK4
error: no matching resources found
+ rm /tmp/tmp.rlE7aFe61W /tmp/tmp.KeINCy5NK4
+ return 1
+ true
+ wait_for_running some-name-proxysql 1
+ local name=some-name-proxysql
+ let last_pod=0
+ :
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 0
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-proxysql-0 480
+ local pod=some-name-proxysql-0
+ local max_retry=480
+ local ns=
++ echo some-name-proxysql-0
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=proxysql
+ set +o xtrace
pod/some-name-proxysql-0 condition met
waiting for pod/some-name-proxysql-0 to become Ready.Ok
+ wait_for_running some-name-pxc 3
+ local name=some-name-pxc
+ let last_pod=2
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 2
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-pxc-0 480
+ local pod=some-name-pxc-0
+ local max_retry=480
+ local ns=
++ echo some-name-pxc-0
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=pxc
+ set +o xtrace
pod/some-name-pxc-0 condition met
waiting for pod/some-name-pxc-0 to become Ready.Ok
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-pxc-1 480
+ local pod=some-name-pxc-1
+ local max_retry=480
+ local ns=
++ echo some-name-pxc-1
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=pxc
+ set +o xtrace
pod/some-name-pxc-1 condition met
waiting for pod/some-name-pxc-1 to become Ready.Ok
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-pxc-2 480
+ local pod=some-name-pxc-2
+ local max_retry=480
+ local ns=
++ echo some-name-pxc-2
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=pxc
+ set +o xtrace
pod/some-name-pxc-2 condition met
waiting for pod/some-name-pxc-2 to become Ready.Ok
+ sleep 10
++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}'
+ local secret_name=my-cluster-secrets
++ getSecretData my-cluster-secrets root
++ local secretName=my-cluster-secrets
++ local dataKey=root
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.ZPvFE4ywek
+++ mktemp
++ local LAST_ERR=/tmp/tmp.l3XmJ8a96m
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.ZPvFE4ywek
++ cat /tmp/tmp.l3XmJ8a96m
++ rm /tmp/tmp.ZPvFE4ywek /tmp/tmp.l3XmJ8a96m
++ return 0
+ local 'root_pass=,cYiADSYc?sSCQ_mQYc'
+ desc 'write data'
+ set +o xtrace
-----------------------------------------------------------------------------------
write data
-----------------------------------------------------------------------------------
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\'',cYiADSYc?sSCQ_mQYc'\'' -P3306'
+ local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;'
+ local 'uri=-h some-name-proxysql -uroot -p'\'',cYiADSYc?sSCQ_mQYc'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.eknHvKXUgk
+++ mktemp
++ local LAST_ERR=/tmp/tmp.iSzEsW31R3
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.eknHvKXUgk
++ cat /tmp/tmp.iSzEsW31R3
++ rm /tmp/tmp.eknHvKXUgk /tmp/tmp.iSzEsW31R3
++ return 0
+ client_pod=pxc-client-59944c5bbf-mwkf4
+ wait_pod pxc-client-59944c5bbf-mwkf4
+ local pod=pxc-client-59944c5bbf-mwkf4
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-mwkf4
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-mwkf4 condition met
waiting for pod/pxc-client-59944c5bbf-mwkf4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
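Note: `run_mysql` executes each statement from inside the `pxc-client` Deployment rather than from the Jenkins host: it looks up the client pod by label, waits for it to be Ready, then runs the mysql CLI in it. A minimal sketch of that step, with the connection flags taken from the URI strings in the trace; the password retrieval mirrors the traced `getSecretData` call, but the exec invocation itself is an assumed simplification (the real helper also captures output for the compare step):

    # Run a SQL statement through the pxc-client pod (sketch).
    ROOT_PASS=$(kubectl get secrets/my-cluster-secrets --template='{{.data.root}}' | base64 --decode)
    client_pod=$(kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}')
    kubectl exec "$client_pod" -- \
        bash -c "printf '%s\n' 'SELECT * from myApp.myApp;' \
            | mysql -sN -h some-name-proxysql -uroot -p\"$ROOT_PASS\" -P3306"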
+ run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\'',cYiADSYc?sSCQ_mQYc'\'' -P3306'
+ local 'command=INSERT myApp.myApp (id) VALUES (100500)'
+ local 'uri=-h some-name-proxysql -uroot -p'\'',cYiADSYc?sSCQ_mQYc'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.KhKtTGbff8
+++ mktemp
++ local LAST_ERR=/tmp/tmp.kxtPXaakYI
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.KhKtTGbff8
++ cat /tmp/tmp.kxtPXaakYI
++ rm /tmp/tmp.KhKtTGbff8 /tmp/tmp.kxtPXaakYI
++ return 0
+ client_pod=pxc-client-59944c5bbf-mwkf4
+ wait_pod pxc-client-59944c5bbf-mwkf4
+ local pod=pxc-client-59944c5bbf-mwkf4
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-mwkf4
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-mwkf4 condition met
waiting for pod/pxc-client-59944c5bbf-mwkf4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ sleep 30
++ seq 0 2
+ for i in '$(seq 0 $((size - 1)))'
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\'',cYiADSYc?sSCQ_mQYc'\'' -P3306'
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\'',cYiADSYc?sSCQ_mQYc'\'' -P3306'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-1-80.sql ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\'',cYiADSYc?sSCQ_mQYc'\'' -P3306'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\'',cYiADSYc?sSCQ_mQYc'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.S0p0w1FEz2
+++ mktemp
++ local LAST_ERR=/tmp/tmp.95llFOimQL
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.S0p0w1FEz2
++ cat /tmp/tmp.95llFOimQL
++ rm /tmp/tmp.S0p0w1FEz2 /tmp/tmp.95llFOimQL
++ return 0
+ client_pod=pxc-client-59944c5bbf-mwkf4
+ wait_pod pxc-client-59944c5bbf-mwkf4
+ local pod=pxc-client-59944c5bbf-mwkf4
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-mwkf4
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-mwkf4 condition met
waiting for pod/pxc-client-59944c5bbf-mwkf4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.nLauKsh5Ah/select-1.sql ']'
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-1.sql /tmp/tmp.nLauKsh5Ah/select-1.sql
+ for i in '$(seq 0 $((size - 1)))'
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\'',cYiADSYc?sSCQ_mQYc'\'' -P3306'
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\'',cYiADSYc?sSCQ_mQYc'\'' -P3306'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-1-80.sql ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\'',cYiADSYc?sSCQ_mQYc'\'' -P3306'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\'',cYiADSYc?sSCQ_mQYc'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.rIk67XkkVP
+++ mktemp
++ local LAST_ERR=/tmp/tmp.I6jfGAUTzO
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.rIk67XkkVP
++ cat /tmp/tmp.I6jfGAUTzO
++ rm /tmp/tmp.rIk67XkkVP /tmp/tmp.I6jfGAUTzO
++ return 0
+ client_pod=pxc-client-59944c5bbf-mwkf4
+ wait_pod pxc-client-59944c5bbf-mwkf4
+ local pod=pxc-client-59944c5bbf-mwkf4
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-mwkf4
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-mwkf4 condition met
waiting for pod/pxc-client-59944c5bbf-mwkf4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.nLauKsh5Ah/select-1.sql ']'
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-1.sql /tmp/tmp.nLauKsh5Ah/select-1.sql
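Note: each `compare_mysql_cmd` call above is a golden-file test: the query result is written under the run's temp dir and compared with `diff -u` against a checked-in `.sql` fixture, with a `-80` variant chosen when the image is 8.0. Roughly, assuming a `run_mysql` helper like the one sketched earlier and hypothetical `tmp_dir`/`uri` variables:

    # Golden-file comparison as the trace performs it (sketch).
    expected=e2e-tests/users/compare/select-1.sql
    run_mysql 'SELECT * from myApp.myApp;' "$uri" > "$tmp_dir/select-1.sql"
    [ -s "$tmp_dir/select-1.sql" ]               # fail if the result is empty
    diff -u "$expected" "$tmp_dir/select-1.sql"  # a non-empty diff fails the test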
+ for i in '$(seq 0 $((size - 1)))'
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\'',cYiADSYc?sSCQ_mQYc'\'' -P3306'
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\'',cYiADSYc?sSCQ_mQYc'\'' -P3306'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-1-80.sql ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\'',cYiADSYc?sSCQ_mQYc'\'' -P3306'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\'',cYiADSYc?sSCQ_mQYc'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.qtwb5wlfST
+++ mktemp
++ local LAST_ERR=/tmp/tmp.iivergXO2O
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.qtwb5wlfST
++ cat /tmp/tmp.iivergXO2O
++ rm /tmp/tmp.qtwb5wlfST /tmp/tmp.iivergXO2O
++ return 0
+ client_pod=pxc-client-59944c5bbf-mwkf4
+ wait_pod pxc-client-59944c5bbf-mwkf4
+ local pod=pxc-client-59944c5bbf-mwkf4
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-mwkf4
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-mwkf4 condition met
waiting for pod/pxc-client-59944c5bbf-mwkf4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.nLauKsh5Ah/select-1.sql ']'
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-1.sql /tmp/tmp.nLauKsh5Ah/select-1.sql
++ is_keyring_plugin_in_use some-name
++ local cluster=some-name
++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf'
++ grep -E -o 'early-plugin-load=keyring_\w+.so'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.n51cZPllym
+++ mktemp
++ local LAST_ERR=/tmp/tmp.iz3RcXNdf2
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.n51cZPllym
++ cat /tmp/tmp.iz3RcXNdf2
Unable to use a TTY - input is not a terminal or the right kind of file
++ rm /tmp/tmp.n51cZPllym /tmp/tmp.iz3RcXNdf2
++ return 0
+ '[' '' ']'
+ desc 'test missing passwords were created and present in internal secrets'
+ set +o xtrace
-----------------------------------------------------------------------------------
test missing passwords were created and present in internal secrets
-----------------------------------------------------------------------------------
+ empty_pwds=()
+ wrong_pwds=()
+ for user in root xtrabackup monitor proxyadmin operator replication
+ echo 'Checking root'
Checking root
++ getSecretData my-cluster-secrets root
++ local secretName=my-cluster-secrets
++ local dataKey=root
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.6YjFPiF54b
+++ mktemp
++ local LAST_ERR=/tmp/tmp.HIGUvMBI48
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.6YjFPiF54b
++ cat /tmp/tmp.HIGUvMBI48
++ rm /tmp/tmp.6YjFPiF54b /tmp/tmp.HIGUvMBI48
++ return 0
+ secret_pass=',cYiADSYc?sSCQ_mQYc'
++ getSecretData internal-some-name root
++ local secretName=internal-some-name
++ local dataKey=root
++ kubectl_bin get secrets/internal-some-name '--template={{.data.root}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.Zi9b4aFFMm
+++ mktemp
++ local LAST_ERR=/tmp/tmp.ZsvGD6N6d3
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/internal-some-name '--template={{.data.root}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.Zi9b4aFFMm
++ cat /tmp/tmp.ZsvGD6N6d3
++ rm /tmp/tmp.Zi9b4aFFMm /tmp/tmp.ZsvGD6N6d3
++ return 0
+ int_secret_pass=',cYiADSYc?sSCQ_mQYc'
+ [[ -z ,cYiADSYc?sSCQ_mQYc ]]
+ [[ ,cYiADSYc?sSCQ_mQYc != \,\c\Y\i\A\D\S\Y\c\?\s\S\C\Q\_\m\Q\Y\c ]]
+ [[ root != \p\r\o\x\y\a\d\m\i\n ]]
+ [[ '' =~ root ]]
+ [[ '' =~ root ]]
+ echo 'Running compare for root'
Running compare for root
+ compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\'',cYiADSYc?sSCQ_mQYc'\'''
+ local command_id=select-4
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -uroot -p'\'',cYiADSYc?sSCQ_mQYc'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql ]]
+ expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql
+ run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\'',cYiADSYc?sSCQ_mQYc'\'''
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -uroot -p'\'',cYiADSYc?sSCQ_mQYc'\'''
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.xIUIBuxNXp
+++ mktemp
++ local LAST_ERR=/tmp/tmp.UF9e5AKy3r
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.xIUIBuxNXp
++ cat /tmp/tmp.UF9e5AKy3r
++ rm /tmp/tmp.xIUIBuxNXp /tmp/tmp.UF9e5AKy3r
++ return 0
+ client_pod=pxc-client-59944c5bbf-mwkf4
+ wait_pod pxc-client-59944c5bbf-mwkf4
+ local pod=pxc-client-59944c5bbf-mwkf4
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-mwkf4
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-mwkf4 condition met
waiting for pod/pxc-client-59944c5bbf-mwkf4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.nLauKsh5Ah/select-4.sql ']'
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.nLauKsh5Ah/select-4.sql
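Note: the password check compares each user's value in the user-facing Secret (`my-cluster-secrets`) with the operator's internal copy (`internal-some-name`); an empty or mismatched value would land the user in the `empty_pwds`/`wrong_pwds` arrays initialized above. A sketch of the comparison for one user, assembled from the traced `getSecretData` calls:

    # Compare user-facing and internal secret values for one user (sketch).
    user=root
    secret_pass=$(kubectl get secrets/my-cluster-secrets --template="{{.data.${user}}}" | base64 --decode)
    int_secret_pass=$(kubectl get secrets/internal-some-name --template="{{.data.${user}}}" | base64 --decode)
    if [ -z "$secret_pass" ]; then empty_pwds+=("$user"); fi
    if [ "$secret_pass" != "$int_secret_pass" ]; then wrong_pwds+=("$user"); fi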
+ for user in root xtrabackup monitor proxyadmin operator replication
+ echo 'Checking xtrabackup'
Checking xtrabackup
++ getSecretData my-cluster-secrets xtrabackup
++ local secretName=my-cluster-secrets
++ local dataKey=xtrabackup
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.Fts5RBAjq7
+++ mktemp
++ local LAST_ERR=/tmp/tmp.oJB3xrgGxi
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.Fts5RBAjq7
++ cat /tmp/tmp.oJB3xrgGxi
++ rm /tmp/tmp.Fts5RBAjq7 /tmp/tmp.oJB3xrgGxi
++ return 0
+ secret_pass='NURLcw-LT(K_>brS%'
++ getSecretData internal-some-name xtrabackup
++ local secretName=internal-some-name
++ local dataKey=xtrabackup
++ kubectl_bin get secrets/internal-some-name '--template={{.data.xtrabackup}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.w90484Cnhf
+++ mktemp
++ local LAST_ERR=/tmp/tmp.d0dAslYooS
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/internal-some-name '--template={{.data.xtrabackup}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.w90484Cnhf
++ cat /tmp/tmp.d0dAslYooS
++ rm /tmp/tmp.w90484Cnhf /tmp/tmp.d0dAslYooS
++ return 0
+ int_secret_pass='NURLcw-LT(K_>brS%'
+ [[ -z NURLcw-LT(K_>brS% ]]
+ [[ NURLcw-LT(K_>brS% != \N\U\R\L\c\w\-\L\T\(\K\_\>\b\r\S\% ]]
+ [[ xtrabackup != \p\r\o\x\y\a\d\m\i\n ]]
+ [[ '' =~ xtrabackup ]]
+ [[ '' =~ xtrabackup ]]
+ echo 'Running compare for xtrabackup'
Running compare for xtrabackup
+ compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''NURLcw-LT(K_>brS%'\'''
+ local command_id=select-4
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -uxtrabackup -p'\''NURLcw-LT(K_>brS%'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql ]]
+ expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql
+ run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''NURLcw-LT(K_>brS%'\'''
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -uxtrabackup -p'\''NURLcw-LT(K_>brS%'\'''
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.O8HmF9O13Z
+++ mktemp
++ local LAST_ERR=/tmp/tmp.Z1gEICNfO4
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.O8HmF9O13Z
++ cat /tmp/tmp.Z1gEICNfO4
++ rm /tmp/tmp.O8HmF9O13Z /tmp/tmp.Z1gEICNfO4
++ return 0
+ client_pod=pxc-client-59944c5bbf-mwkf4
+ wait_pod pxc-client-59944c5bbf-mwkf4
+ local pod=pxc-client-59944c5bbf-mwkf4
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-mwkf4
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-mwkf4 condition met
waiting for pod/pxc-client-59944c5bbf-mwkf4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.nLauKsh5Ah/select-4.sql ']'
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.nLauKsh5Ah/select-4.sql
+ for user in root xtrabackup monitor proxyadmin operator replication
+ echo 'Checking monitor'
Checking monitor
++ getSecretData my-cluster-secrets monitor
++ local secretName=my-cluster-secrets
++ local dataKey=monitor
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.monitor}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.rMZAQkVYhm
+++ mktemp
++ local LAST_ERR=/tmp/tmp.w5d56piETC
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.monitor}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.rMZAQkVYhm
++ cat /tmp/tmp.w5d56piETC
++ rm /tmp/tmp.rMZAQkVYhm /tmp/tmp.w5d56piETC
++ return 0
+ secret_pass=monitor_password
++ getSecretData internal-some-name monitor
++ local secretName=internal-some-name
++ local dataKey=monitor
++ kubectl_bin get secrets/internal-some-name '--template={{.data.monitor}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.3KPoJgDLvH
+++ mktemp
++ local LAST_ERR=/tmp/tmp.kjGYe0r6Je
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/internal-some-name '--template={{.data.monitor}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.3KPoJgDLvH
++ cat /tmp/tmp.kjGYe0r6Je
++ rm /tmp/tmp.3KPoJgDLvH /tmp/tmp.kjGYe0r6Je
++ return 0
+ int_secret_pass=monitor_password
+ [[ -z monitor_password ]]
+ [[ monitor_password != \m\o\n\i\t\o\r\_\p\a\s\s\w\o\r\d ]]
+ [[ monitor != \p\r\o\x\y\a\d\m\i\n ]]
+ [[ '' =~ monitor ]]
+ [[ '' =~ monitor ]]
+ echo 'Running compare for monitor'
Running compare for monitor
+ compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\'''
+ local command_id=select-4
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql ]]
+ expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql
+ run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\'''
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\'''
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.TKO6Gs0V6R
+++ mktemp
++ local LAST_ERR=/tmp/tmp.MZBAMlbxuW
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.TKO6Gs0V6R
++ cat /tmp/tmp.MZBAMlbxuW
++ rm /tmp/tmp.TKO6Gs0V6R /tmp/tmp.MZBAMlbxuW
++ return 0
+ client_pod=pxc-client-59944c5bbf-mwkf4
+ wait_pod pxc-client-59944c5bbf-mwkf4
+ local pod=pxc-client-59944c5bbf-mwkf4
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-mwkf4
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-mwkf4 condition met
waiting for pod/pxc-client-59944c5bbf-mwkf4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.nLauKsh5Ah/select-4.sql ']'
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.nLauKsh5Ah/select-4.sql
+ for user in root xtrabackup monitor proxyadmin operator replication
+ echo 'Checking proxyadmin'
Checking proxyadmin
++ getSecretData my-cluster-secrets proxyadmin
++ local secretName=my-cluster-secrets
++ local dataKey=proxyadmin
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.L8EcfJkfgY
+++ mktemp
++ local LAST_ERR=/tmp/tmp.E7BIMOE3n6
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.L8EcfJkfgY
++ cat /tmp/tmp.E7BIMOE3n6
++ rm /tmp/tmp.L8EcfJkfgY /tmp/tmp.E7BIMOE3n6
++ return 0
+ secret_pass='G7DcE=_^Mo>MTtCG'
++ getSecretData internal-some-name proxyadmin
++ local secretName=internal-some-name
++ local dataKey=proxyadmin
++ kubectl_bin get secrets/internal-some-name '--template={{.data.proxyadmin}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.WM8PolaHec
+++ mktemp
++ local LAST_ERR=/tmp/tmp.NhyEniASZZ
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/internal-some-name '--template={{.data.proxyadmin}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.WM8PolaHec
++ cat /tmp/tmp.NhyEniASZZ
++ rm /tmp/tmp.WM8PolaHec /tmp/tmp.NhyEniASZZ
++ return 0
+ int_secret_pass='G7DcE=_^Mo>MTtCG'
+ [[ -z G7DcE=_^Mo>MTtCG ]]
+ [[ G7DcE=_^Mo>MTtCG != \G\7\D\c\E\=\_\^\M\o\>\M\T\t\C\G ]]
+ [[ proxyadmin != \p\r\o\x\y\a\d\m\i\n ]]
+ [[ proxyadmin == \p\r\o\x\y\a\d\m\i\n ]]
+ [[ '' =~ proxyadmin ]]
+ [[ '' =~ proxyadmin ]]
+ echo 'Running compare for proxyadmin'
Running compare for proxyadmin
+ compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''G7DcE=_^Mo>MTtCG'\''' some-name-proxysql-0 '' proxysql
+ local command_id=select-2
+ local 'command=SHOW TABLES;'
+ local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''G7DcE=_^Mo>MTtCG'\'''
+ local pod=some-name-proxysql-0
+ local postfix=
+ local container_name=proxysql
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-2.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-2-80.sql ']'
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''G7DcE=_^Mo>MTtCG'\''' some-name-proxysql-0 proxysql
+ local 'command=SHOW TABLES;'
+ local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''G7DcE=_^Mo>MTtCG'\'''
+ local pod=some-name-proxysql-0
+ local container_name=proxysql
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.nLauKsh5Ah/select-2.sql ']'
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-2.sql /tmp/tmp.nLauKsh5Ah/select-2.sql
-s /tmp/tmp.nLauKsh5Ah/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-2.sql /tmp/tmp.nLauKsh5Ah/select-2.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking operator' Checking operator ++ getSecretData my-cluster-secrets operator ++ local secretName=my-cluster-secrets ++ local dataKey=operator ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.4gXR6j5VpB +++ mktemp ++ local LAST_ERR=/tmp/tmp.KEtghyZGhr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4gXR6j5VpB ++ cat /tmp/tmp.KEtghyZGhr ++ rm /tmp/tmp.4gXR6j5VpB /tmp/tmp.KEtghyZGhr ++ return 0 + secret_pass=BRFD,xIPFjvEkPhqeN ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.qM7RpO89f7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZbXhEU8RZM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qM7RpO89f7 ++ cat /tmp/tmp.ZbXhEU8RZM ++ rm /tmp/tmp.qM7RpO89f7 /tmp/tmp.ZbXhEU8RZM ++ return 0 + int_secret_pass=BRFD,xIPFjvEkPhqeN + [[ -z BRFD,xIPFjvEkPhqeN ]] + [[ BRFD,xIPFjvEkPhqeN != \B\R\F\D\,\x\I\P\F\j\v\E\k\P\h\q\e\N ]] + [[ operator != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ operator ]] + [[ '' =~ operator ]] + echo 'Running compare for operator' Running compare for operator + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''BRFD,xIPFjvEkPhqeN'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''BRFD,xIPFjvEkPhqeN'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''BRFD,xIPFjvEkPhqeN'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''BRFD,xIPFjvEkPhqeN'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Hrobo9nMK6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.otgZSEo5gr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Hrobo9nMK6 ++ cat /tmp/tmp.otgZSEo5gr ++ rm /tmp/tmp.Hrobo9nMK6 /tmp/tmp.otgZSEo5gr ++ return 0 + client_pod=pxc-client-59944c5bbf-mwkf4 + wait_pod pxc-client-59944c5bbf-mwkf4 + local pod=pxc-client-59944c5bbf-mwkf4 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mwkf4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep 
-E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mwkf4 condition met waiting for pod/pxc-client-59944c5bbf-mwkf4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.nLauKsh5Ah/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.nLauKsh5Ah/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking replication' Checking replication ++ getSecretData my-cluster-secrets replication ++ local secretName=my-cluster-secrets ++ local dataKey=replication ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.H6bfMtvJkX +++ mktemp ++ local LAST_ERR=/tmp/tmp.H3ka4TZsX3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.H6bfMtvJkX ++ cat /tmp/tmp.H3ka4TZsX3 ++ rm /tmp/tmp.H6bfMtvJkX /tmp/tmp.H3ka4TZsX3 ++ return 0 + secret_pass=')DO1Xxkw$p9.8?=Y0' ++ getSecretData internal-some-name replication ++ local secretName=internal-some-name ++ local dataKey=replication ++ kubectl_bin get secrets/internal-some-name '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.zepg5bBKeM +++ mktemp ++ local LAST_ERR=/tmp/tmp.xWO7VQbjWq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zepg5bBKeM ++ cat /tmp/tmp.xWO7VQbjWq ++ rm /tmp/tmp.zepg5bBKeM /tmp/tmp.xWO7VQbjWq ++ return 0 + int_secret_pass=')DO1Xxkw$p9.8?=Y0' + [[ -z )DO1Xxkw$p9.8?=Y0 ]] + [[ )DO1Xxkw$p9.8?=Y0 != \)\D\O\1\X\x\k\w\$\p\9\.\8\?\=\Y\0 ]] + [[ replication != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ replication ]] + [[ '' =~ replication ]] + echo 'Running compare for replication' Running compare for replication + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\'')DO1Xxkw$p9.8?=Y0'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\'')DO1Xxkw$p9.8?=Y0'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\'')DO1Xxkw$p9.8?=Y0'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\'')DO1Xxkw$p9.8?=Y0'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5pRlKkGLXe +++ mktemp ++ local LAST_ERR=/tmp/tmp.rKHggbX3FO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5pRlKkGLXe ++ cat /tmp/tmp.rKHggbX3FO 
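-----------------------------------------------------------------------------------
note: how the per-user secret checks above work
-----------------------------------------------------------------------------------
Each "Checking <user>" block reads the user's key from the public Secret and from the operator's internal copy, base64-decodes both, and requires the two values to match before logging in with that password. A minimal re-implementation of that helper pair, assuming only kubectl and base64 are available (function names mirror the trace, but this is a sketch, not the test's source):

#!/bin/bash
set -euo pipefail

# Decode one key from a Kubernetes Secret (mirrors getSecretData in the trace).
get_secret_data() {
    local secret_name=$1 data_key=$2
    kubectl get "secrets/${secret_name}" --template="{{.data.${data_key}}}" \
        | base64 --decode
}

# The password in the user-facing Secret must equal the operator's internal
# copy; the test treats any divergence as a reconciliation bug.
check_user_secret_synced() {
    local user=$1 pass int_pass
    pass=$(get_secret_data my-cluster-secrets "$user")
    int_pass=$(get_secret_data internal-some-name "$user")
    [[ -n $pass && $pass == "$int_pass" ]]
}

for user in root xtrabackup monitor proxyadmin operator replication; do
    check_user_secret_synced "$user" || echo "secret mismatch for $user" >&2
done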
++ rm /tmp/tmp.5pRlKkGLXe /tmp/tmp.rKHggbX3FO ++ return 0 + client_pod=pxc-client-59944c5bbf-mwkf4 + wait_pod pxc-client-59944c5bbf-mwkf4 + local pod=pxc-client-59944c5bbf-mwkf4 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mwkf4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mwkf4 condition met waiting for pod/pxc-client-59944c5bbf-mwkf4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.nLauKsh5Ah/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.nLauKsh5Ah/select-4.sql + [[ -n '' ]] + [[ -n '' ]] + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.VtLAxN52IN ++ mktemp + local LAST_ERR=/tmp/tmp.EuMckLYPWJ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.VtLAxN52IN secret/my-cluster-secrets patched + cat /tmp/tmp.EuMckLYPWJ + rm /tmp/tmp.VtLAxN52IN /tmp/tmp.EuMckLYPWJ + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mbdH8ZM3O3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.X3hzPytIoD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mbdH8ZM3O3 ++ cat /tmp/tmp.X3hzPytIoD ++ rm /tmp/tmp.mbdH8ZM3O3 /tmp/tmp.X3hzPytIoD ++ return 0 + client_pod=pxc-client-59944c5bbf-mwkf4 + wait_pod pxc-client-59944c5bbf-mwkf4 + local pod=pxc-client-59944c5bbf-mwkf4 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mwkf4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mwkf4 condition met waiting for pod/pxc-client-59944c5bbf-mwkf4 to become ReadyDefaulted container "pxc-client" out of: 
pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.nLauKsh5Ah/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.nLauKsh5Ah/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.cApBtUDZ95 ++ mktemp + local LAST_ERR=/tmp/tmp.NxHot6qwQp + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.cApBtUDZ95 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.NxHot6qwQp + rm /tmp/tmp.cApBtUDZ95 /tmp/tmp.NxHot6qwQp + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.g2qVPHbp3r +++ mktemp ++ local LAST_ERR=/tmp/tmp.CiQt3wCQKs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.g2qVPHbp3r ++ cat /tmp/tmp.CiQt3wCQKs ++ rm /tmp/tmp.g2qVPHbp3r /tmp/tmp.CiQt3wCQKs ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oQpBg0S6xj +++ mktemp ++ local LAST_ERR=/tmp/tmp.Kqs5ajGNEf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oQpBg0S6xj ++ cat /tmp/tmp.Kqs5ajGNEf ++ rm /tmp/tmp.oQpBg0S6xj /tmp/tmp.Kqs5ajGNEf ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.iy7Qes2KZp ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.1l87ugCXyG +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.iy7Qes2KZp +++++ cat /tmp/tmp.1l87ugCXyG +++++ rm /tmp/tmp.iy7Qes2KZp /tmp/tmp.1l87ugCXyG +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.UeV8A4gcFI ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.iry0Yw92Lq +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' 
+++++ break +++++ cat /tmp/tmp.UeV8A4gcFI +++++ cat /tmp/tmp.iry0Yw92Lq +++++ rm /tmp/tmp.UeV8A4gcFI /tmp/tmp.iry0Yw92Lq +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kf5lAxerVZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.ay31liwS07 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kf5lAxerVZ ++ cat /tmp/tmp.ay31liwS07 ++ rm /tmp/tmp.kf5lAxerVZ /tmp/tmp.ay31liwS07 ++ return 0 + [[ 3 == \3 ]] + echo + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.O1cS38YvUy ++ mktemp + local LAST_ERR=/tmp/tmp.2Xf7cz4q8d + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.O1cS38YvUy secret/my-cluster-secrets patched + cat /tmp/tmp.2Xf7cz4q8d + rm /tmp/tmp.O1cS38YvUy /tmp/tmp.2Xf7cz4q8d + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.x2UEFvQMAF +++ mktemp ++ local LAST_ERR=/tmp/tmp.g6qnobsLDs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.x2UEFvQMAF ++ cat /tmp/tmp.g6qnobsLDs ++ rm /tmp/tmp.x2UEFvQMAF /tmp/tmp.g6qnobsLDs ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
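-----------------------------------------------------------------------------------
note: how a password rotation is triggered
-----------------------------------------------------------------------------------
Every "test <user>" phase starts the same way: the corresponding key of my-cluster-secrets is patched in place and the operator is expected to propagate the change. Secret data is base64-encoded, so the dGVzdC1wYXNzd29yZA== seen above is simply "test-password". A sketch of that step (patch_secret mirrors the trace; the retry wrapper and temp-file capture are omitted):

#!/bin/bash
set -euo pipefail

# Rotate one system-user password by patching its key in the Secret.
# The operator watches the Secret and applies the new password inside
# MySQL/ProxySQL, which is what the rest of the phase verifies.
patch_secret() {
    local secret=$1 key=$2 new_password=$3
    local value
    value=$(printf '%s' "$new_password" | base64)
    kubectl patch secret "$secret" -p="{\"data\":{\"${key}\": \"${value}\"}}"
}

patch_secret my-cluster-secrets proxyadmin test-password

The proxysql resize just before this uses the same mechanism against the custom resource instead of the Secret: kubectl patch pxc some-name --type=merge -p '{"spec":{"proxysql":{"size":3}}}'.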
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.E7hlWnIxyl +++ mktemp ++ local LAST_ERR=/tmp/tmp.KRRXfPaAyI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.E7hlWnIxyl ++ cat /tmp/tmp.KRRXfPaAyI ++ rm /tmp/tmp.E7hlWnIxyl /tmp/tmp.KRRXfPaAyI ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zEf5Hd9rmg +++ mktemp ++ local LAST_ERR=/tmp/tmp.IqbzP9qfOQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zEf5Hd9rmg ++ cat /tmp/tmp.IqbzP9qfOQ ++ rm /tmp/tmp.zEf5Hd9rmg /tmp/tmp.IqbzP9qfOQ ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.jtvQj3gVSp ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.1Vzdkv4iWs +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.jtvQj3gVSp +++++ cat /tmp/tmp.1Vzdkv4iWs +++++ rm /tmp/tmp.jtvQj3gVSp /tmp/tmp.1Vzdkv4iWs +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ls6lMuVIqE ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.IlwAnMztk3 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ls6lMuVIqE +++++ cat /tmp/tmp.IlwAnMztk3 +++++ rm /tmp/tmp.ls6lMuVIqE /tmp/tmp.IlwAnMztk3 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wdctKw99M6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.bt6XSV49Ff ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wdctKw99M6 ++ cat /tmp/tmp.bt6XSV49Ff ++ rm /tmp/tmp.wdctKw99M6 /tmp/tmp.bt6XSV49Ff ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] 
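-----------------------------------------------------------------------------------
note: how the proxyadmin comparison runs
-----------------------------------------------------------------------------------
proxyadmin exists only in ProxySQL's admin interface, so instead of going through the pxc-client pod the test execs into each proxysql container and queries the local admin port 6032, diffing the output against a canned expected file; a version-suffixed variant (select-2-80.sql) is preferred when the image is an 8.0 build and such a file exists. A sketch under those assumptions (the exact mysql invocation is hidden behind set +o xtrace in the trace, so its flags here are an assumption):

#!/bin/bash
set -euo pipefail

compare_dir=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare
image=perconalab/percona-xtradb-cluster-operator:main-pxc8.0

# Prefer a version-suffixed expected file when the image is an 8.0 build
# and that file exists; otherwise fall back to the generic one.
expected=$compare_dir/select-2.sql
if [[ $image =~ 8\.0 && -f $compare_dir/select-2-80.sql ]]; then
    expected=$compare_dir/select-2-80.sql
fi

# Query ProxySQL's admin interface (port 6032) inside each proxysql
# container, then diff the output; a non-empty diff fails the test.
for pod in some-name-proxysql-0 some-name-proxysql-1 some-name-proxysql-2; do
    kubectl exec "$pod" -c proxysql -- \
        mysql -sN -h127.0.0.1 -P6032 -uproxyadmin -p'test-password' \
        -e 'SHOW TABLES;' >/tmp/select-2.sql
    diff -u "$expected" /tmp/select-2.sql
done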
+ run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.nLauKsh5Ah/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-2.sql /tmp/tmp.nLauKsh5Ah/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.nLauKsh5Ah/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-2.sql /tmp/tmp.nLauKsh5Ah/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.nLauKsh5Ah/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-2.sql /tmp/tmp.nLauKsh5Ah/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ekwJ49Yp6z ++ mktemp + local LAST_ERR=/tmp/tmp.gQmnRBit5k + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ekwJ49Yp6z perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.gQmnRBit5k + rm /tmp/tmp.ekwJ49Yp6z /tmp/tmp.gQmnRBit5k + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.fxO2dcWvmt ++ mktemp + local LAST_ERR=/tmp/tmp.5qxSQ32wWq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.fxO2dcWvmt secret/my-cluster-secrets patched + cat /tmp/tmp.5qxSQ32wWq + rm /tmp/tmp.fxO2dcWvmt /tmp/tmp.5qxSQ32wWq + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6WDAHZeou7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.32PzCjSyz7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6WDAHZeou7 ++ cat /tmp/tmp.32PzCjSyz7 ++ rm /tmp/tmp.6WDAHZeou7 /tmp/tmp.32PzCjSyz7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IHv3Fweb07 +++ mktemp ++ local LAST_ERR=/tmp/tmp.vbixgGv1Tk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IHv3Fweb07 ++ cat /tmp/tmp.vbixgGv1Tk ++ rm /tmp/tmp.IHv3Fweb07 /tmp/tmp.vbixgGv1Tk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1
[22 near-identical poll iterations elided: counters 1-22 each re-ran kubectl get pxc some-name -o 'jsonpath={.status.state}' through kubectl_bin, saw "initializing", printed one dot, and slept 5 seconds before re-checking the counter against the 300-attempt cap]
+ [[ initializing == \r\e\a\d\y ]] + echo -n . 
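-----------------------------------------------------------------------------------
note: what the dotted wait loop is doing
-----------------------------------------------------------------------------------
The dotted run above is wait_cluster_consistency: one status probe every 5 seconds until .status.state reports "ready" and the pxc/proxysql ready counts match the sizes the test just requested, capped at 300 attempts (about 25 minutes). A condensed sketch (field paths taken from the trace; the real helper retries each kubectl call up to three times via kubectl_bin, which is dropped here):

#!/bin/bash
set -euo pipefail

# Poll the PerconaXtraDBCluster status until the operator reports the
# cluster ready and the ready replica counts match the requested sizes.
wait_cluster_consistency() {
    local cluster=$1 pxc_size=$2 proxy_size=$3
    local i=0 max=300 state pxc_ready proxy_ready
    while true; do
        state=$(kubectl get pxc "$cluster" -o jsonpath='{.status.state}')
        pxc_ready=$(kubectl get pxc "$cluster" -o jsonpath='{.status.pxc.ready}')
        proxy_ready=$(kubectl get pxc "$cluster" -o jsonpath='{.status.proxysql.ready}')
        if [[ $state == ready && $pxc_ready == "$pxc_size" && $proxy_ready == "$proxy_size" ]]; then
            echo
            return 0
        fi
        if (( i >= max )); then
            echo "pxc/$cluster never became ready" >&2
            return 1
        fi
        echo -n .
        sleep 5
        i=$((i + 1))
    done
}

wait_cluster_consistency some-name 3 2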
.+ sleep 5 + [[ 23 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xso1fHOX3S +++ mktemp ++ local LAST_ERR=/tmp/tmp.Rr9mKELGWD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xso1fHOX3S ++ cat /tmp/tmp.Rr9mKELGWD ++ rm /tmp/tmp.xso1fHOX3S /tmp/tmp.Rr9mKELGWD ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kJLTBBolxX +++ mktemp ++ local LAST_ERR=/tmp/tmp.JLaYl13w6X ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kJLTBBolxX ++ cat /tmp/tmp.JLaYl13w6X ++ rm /tmp/tmp.kJLTBBolxX /tmp/tmp.JLaYl13w6X ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.yvayaLn4Gw ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.5HcRqsRYNb +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.yvayaLn4Gw +++++ cat /tmp/tmp.5HcRqsRYNb +++++ rm /tmp/tmp.yvayaLn4Gw /tmp/tmp.5HcRqsRYNb +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.mRrUGoMRua ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.PnCkMtinjA +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.mRrUGoMRua +++++ cat /tmp/tmp.PnCkMtinjA +++++ rm /tmp/tmp.mRrUGoMRua /tmp/tmp.PnCkMtinjA +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.N5rIDTQJJk +++ mktemp ++ local LAST_ERR=/tmp/tmp.sj7l7rZrA3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.N5rIDTQJJk ++ cat /tmp/tmp.sj7l7rZrA3 ++ rm /tmp/tmp.N5rIDTQJJk /tmp/tmp.sj7l7rZrA3 ++ return 0 + [[ 2 == \2 ]] + echo + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-3-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW 
DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' -s /tmp/tmp.nLauKsh5Ah/select-3.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-3.sql /tmp/tmp.nLauKsh5Ah/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.BUXvSU4v0r ++ mktemp + local LAST_ERR=/tmp/tmp.1Bb7a1ktof + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BUXvSU4v0r secret/my-cluster-secrets patched + cat /tmp/tmp.1Bb7a1ktof + rm /tmp/tmp.BUXvSU4v0r /tmp/tmp.1Bb7a1ktof + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.2B74QWfA1r +++ mktemp ++ local LAST_ERR=/tmp/tmp.hZ1nBq5QNK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2B74QWfA1r ++ cat /tmp/tmp.hZ1nBq5QNK ++ rm /tmp/tmp.2B74QWfA1r /tmp/tmp.hZ1nBq5QNK ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + retry=0 + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep additional_password + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.v3PghOoYDp +++ mktemp ++ local LAST_ERR=/tmp/tmp.0NRh7xpChj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.v3PghOoYDp ++ cat /tmp/tmp.0NRh7xpChj ++ rm /tmp/tmp.v3PghOoYDp /tmp/tmp.0NRh7xpChj ++ return 0 + client_pod=pxc-client-59944c5bbf-mwkf4 + wait_pod pxc-client-59944c5bbf-mwkf4 + local pod=pxc-client-59944c5bbf-mwkf4 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mwkf4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mwkf4 
condition met waiting for pod/pxc-client-59944c5bbf-mwkf4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace {"additional_password": "$A$005$ y[0vO\\t\\u001dS~@q6\\u00142%7Gh\\u000fJ0Efa6g7O7ydAGwakuNyjBm1TDgBpmuLtWbZuXERLsB"} + retry=0 + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wW78JQzEaB +++ mktemp ++ local LAST_ERR=/tmp/tmp.e0yr3uPLYx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wW78JQzEaB ++ cat /tmp/tmp.e0yr3uPLYx ++ rm /tmp/tmp.wW78JQzEaB /tmp/tmp.e0yr3uPLYx ++ return 0 + client_pod=pxc-client-59944c5bbf-mwkf4 + wait_pod pxc-client-59944c5bbf-mwkf4 + local pod=pxc-client-59944c5bbf-mwkf4 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mwkf4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mwkf4 condition met waiting for pod/pxc-client-59944c5bbf-mwkf4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 1 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7SZhzDCVa1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.oz5OqfCbyJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7SZhzDCVa1 ++ cat /tmp/tmp.oz5OqfCbyJ ++ rm /tmp/tmp.7SZhzDCVa1 /tmp/tmp.oz5OqfCbyJ ++ return 0 + client_pod=pxc-client-59944c5bbf-mwkf4 + wait_pod pxc-client-59944c5bbf-mwkf4 + local pod=pxc-client-59944c5bbf-mwkf4 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mwkf4 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mwkf4 condition met waiting for pod/pxc-client-59944c5bbf-mwkf4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 2 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot 
-p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\'''
[propagation retries 2-10 elided: each one re-ran SELECT User_attributes FROM mysql.user WHERE user='monitor' through the pxc-client pod, grepped for NULL, found the old password still retained, printed 'waiting for password propagation', and slept 1 second before incrementing retry toward the 240 cap]
+ set +o xtrace pod/pxc-client-59944c5bbf-mwkf4 condition met waiting for pod/pxc-client-59944c5bbf-mwkf4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup 
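-----------------------------------------------------------------------------------
note: what the propagation loop is checking
-----------------------------------------------------------------------------------
For monitor on 8.0 the test leans on MySQL's dual-password feature: while the operator rotates the password with RETAIN CURRENT PASSWORD, mysql.user.User_attributes carries an "additional_password" entry (visible in the JSON above), and the column reads NULL again once the operator issues DISCARD OLD PASSWORD. The loop polls for exactly that transition. A sketch of the two probes, assuming the root password has already rotated to test-password and the some-name-pxc service is reachable (the real helpers proxy the query through the pxc-client pod; this connects directly for brevity):

#!/bin/bash
set -euo pipefail

query="SELECT User_attributes FROM mysql.user WHERE user='monitor'"

# Phase 1 (mirrors is_password_updated): right after rotation the user keeps
# its old password as a secondary one, so User_attributes contains an
# "additional_password" entry -- MySQL 8.0's dual-password state.
is_password_updated() {
    mysql -sN -h some-name-pxc -uroot -p'test-password' -e "$query" \
        | grep -q additional_password
}

# Phase 2 (mirrors is_old_password_discarded): once the operator discards
# the old password, the attribute disappears and the column reads NULL.
is_old_password_discarded() {
    mysql -sN -h some-name-pxc -uroot -p'test-password' -e "$query" \
        | grep -q NULL
}

until is_password_updated; do sleep 1; done
retry=0
until is_old_password_discarded; do
    retry=$((retry + 1))
    if (( retry >= 240 )); then
        echo 'old password never discarded' >&2
        exit 1
    fi
    echo 'waiting for password propagation'
    sleep 1
done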
.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 11 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4valWq1a4z +++ mktemp ++ local LAST_ERR=/tmp/tmp.avalJ3muBx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4valWq1a4z ++ cat /tmp/tmp.avalJ3muBx ++ rm /tmp/tmp.4valWq1a4z /tmp/tmp.avalJ3muBx ++ return 0 + client_pod=pxc-client-59944c5bbf-mwkf4 + wait_pod pxc-client-59944c5bbf-mwkf4 + local pod=pxc-client-59944c5bbf-mwkf4 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mwkf4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mwkf4 condition met waiting for pod/pxc-client-59944c5bbf-mwkf4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 12 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VlTzerxRSA +++ mktemp ++ local LAST_ERR=/tmp/tmp.0M9OYTMMDk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VlTzerxRSA ++ cat /tmp/tmp.0M9OYTMMDk ++ rm /tmp/tmp.VlTzerxRSA /tmp/tmp.0M9OYTMMDk ++ return 0 + client_pod=pxc-client-59944c5bbf-mwkf4 + wait_pod pxc-client-59944c5bbf-mwkf4 + local pod=pxc-client-59944c5bbf-mwkf4 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mwkf4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mwkf4 condition met waiting for pod/pxc-client-59944c5bbf-mwkf4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace NULL + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency 
----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.y1wINGfQRR +++ mktemp ++ local LAST_ERR=/tmp/tmp.O7D32pr5AO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.y1wINGfQRR ++ cat /tmp/tmp.O7D32pr5AO ++ rm /tmp/tmp.y1wINGfQRR /tmp/tmp.O7D32pr5AO ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Qwe8jmJg3l +++ mktemp ++ local LAST_ERR=/tmp/tmp.ojgnV0kItu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Qwe8jmJg3l ++ cat /tmp/tmp.ojgnV0kItu ++ rm /tmp/tmp.Qwe8jmJg3l /tmp/tmp.ojgnV0kItu ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.zcHCP7dOnb ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.jOEmK91mlm +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.zcHCP7dOnb +++++ cat /tmp/tmp.jOEmK91mlm +++++ rm /tmp/tmp.zcHCP7dOnb /tmp/tmp.jOEmK91mlm +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.wcPCgkvQ8B ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.VySGUhNstx +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.wcPCgkvQ8B +++++ cat /tmp/tmp.VySGUhNstx +++++ rm /tmp/tmp.wcPCgkvQ8B /tmp/tmp.VySGUhNstx +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CGDq0j638b +++ mktemp ++ local LAST_ERR=/tmp/tmp.2ZhI8nXRpI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CGDq0j638b ++ cat /tmp/tmp.2ZhI8nXRpI ++ rm /tmp/tmp.CGDq0j638b /tmp/tmp.2ZhI8nXRpI ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f 
/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ldFOWXttOH +++ mktemp ++ local LAST_ERR=/tmp/tmp.2RPn0fIqe7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ldFOWXttOH ++ cat /tmp/tmp.2RPn0fIqe7 ++ rm /tmp/tmp.ldFOWXttOH /tmp/tmp.2RPn0fIqe7 ++ return 0 + client_pod=pxc-client-59944c5bbf-mwkf4 + wait_pod pxc-client-59944c5bbf-mwkf4 + local pod=pxc-client-59944c5bbf-mwkf4 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mwkf4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mwkf4 condition met waiting for pod/pxc-client-59944c5bbf-mwkf4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.nLauKsh5Ah/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.nLauKsh5Ah/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Th5wzzgjcR ++ mktemp + local LAST_ERR=/tmp/tmp.ucZC17lu46 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Th5wzzgjcR secret/my-cluster-secrets patched + cat /tmp/tmp.ucZC17lu46 + rm /tmp/tmp.Th5wzzgjcR /tmp/tmp.ucZC17lu46 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4tYWQxd9U3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.EFcVbJqBCZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4tYWQxd9U3 ++ cat /tmp/tmp.EFcVbJqBCZ ++ rm /tmp/tmp.4tYWQxd9U3 /tmp/tmp.EFcVbJqBCZ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
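----------------------------------------------------------------------------------- note: how patch_secret and the password-propagation check above work -----------------------------------------------------------------------------------
The 'test operator' step patches one key of the my-cluster-secrets Secret with a base64-encoded value (dGVzdC1wYXNzd29yZA== is simply base64 of test-password), and the earlier retry loop waits until MySQL discards the retained old password. A minimal sketch of the two helpers, reconstructed from the trace (the real implementations live in e2e-tests/functions and wrap every kubectl call in the kubectl_bin retry/mktemp machinery seen above):

  patch_secret() {
      local secret=$1 key=$2 value=$3   # value must already be base64-encoded
      kubectl patch secret "$secret" -p="{\"data\":{\"$key\": \"$value\"}}"
  }

  # MySQL 8.0 dual-password support: while the old password is still retained,
  # mysql.user.User_attributes carries an additional_password entry; once the
  # operator discards the old password the column is NULL again - hence grep NULL.
  is_old_password_discarded() {
      run_mysql "SELECT User_attributes FROM mysql.user WHERE user='$1'" "$2" \
          | grep NULL
  }

Usage mirrors the trace: patch_secret my-cluster-secrets operator "$(echo -n test-password | base64)".
-----------------------------------------------------------------------------------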
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cSeXgqkSyj +++ mktemp ++ local LAST_ERR=/tmp/tmp.KKZZ9CoK8I ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cSeXgqkSyj ++ cat /tmp/tmp.KKZZ9CoK8I ++ rm /tmp/tmp.cSeXgqkSyj /tmp/tmp.KKZZ9CoK8I ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bnmgs3h7pl +++ mktemp ++ local LAST_ERR=/tmp/tmp.jr3CphQ9Vo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bnmgs3h7pl ++ cat /tmp/tmp.jr3CphQ9Vo ++ rm /tmp/tmp.bnmgs3h7pl /tmp/tmp.jr3CphQ9Vo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oVR6nav4W0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.3VLf3phnX8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oVR6nav4W0 ++ cat /tmp/tmp.3VLf3phnX8 ++ rm /tmp/tmp.oVR6nav4W0 /tmp/tmp.3VLf3phnX8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vQXTgYTxXm +++ mktemp ++ local LAST_ERR=/tmp/tmp.HYBpnCblmP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vQXTgYTxXm ++ cat /tmp/tmp.HYBpnCblmP ++ rm /tmp/tmp.vQXTgYTxXm /tmp/tmp.HYBpnCblmP ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Mppmo5bEng +++ mktemp ++ local LAST_ERR=/tmp/tmp.HCVb8L1VmK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Mppmo5bEng ++ cat /tmp/tmp.HCVb8L1VmK ++ rm /tmp/tmp.Mppmo5bEng /tmp/tmp.HCVb8L1VmK ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.9jo4F5FyJr ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.00qefU7r2l +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.9jo4F5FyJr +++++ cat /tmp/tmp.00qefU7r2l +++++ rm /tmp/tmp.9jo4F5FyJr /tmp/tmp.00qefU7r2l +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.n1MgPfzRFq ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.FRWpY3Ytrx +++++ local exit_status=0 ++++++ seq 0 2 +++++ 
for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.n1MgPfzRFq +++++ cat /tmp/tmp.FRWpY3Ytrx +++++ rm /tmp/tmp.n1MgPfzRFq /tmp/tmp.FRWpY3Ytrx +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w9ALi9yPkD +++ mktemp ++ local LAST_ERR=/tmp/tmp.pMatvOvRPs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.w9ALi9yPkD ++ cat /tmp/tmp.pMatvOvRPs ++ rm /tmp/tmp.w9ALi9yPkD /tmp/tmp.pMatvOvRPs ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DuwjaV38OS +++ mktemp ++ local LAST_ERR=/tmp/tmp.2slDvAY7OZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DuwjaV38OS ++ cat /tmp/tmp.2slDvAY7OZ ++ rm /tmp/tmp.DuwjaV38OS /tmp/tmp.2slDvAY7OZ ++ return 0 + client_pod=pxc-client-59944c5bbf-mwkf4 + wait_pod pxc-client-59944c5bbf-mwkf4 + local pod=pxc-client-59944c5bbf-mwkf4 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mwkf4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mwkf4 condition met waiting for pod/pxc-client-59944c5bbf-mwkf4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.nLauKsh5Ah/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.nLauKsh5Ah/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.EQ68xpb4MC ++ mktemp + local LAST_ERR=/tmp/tmp.AqatMlC9ft + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.EQ68xpb4MC perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.AqatMlC9ft + rm /tmp/tmp.EQ68xpb4MC /tmp/tmp.AqatMlC9ft + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Az4DrCXqoT +++ mktemp ++ local LAST_ERR=/tmp/tmp.R1ROBsz1F7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Az4DrCXqoT ++ cat /tmp/tmp.R1ROBsz1F7 ++ rm /tmp/tmp.Az4DrCXqoT /tmp/tmp.R1ROBsz1F7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iu155XzYia +++ mktemp ++ local LAST_ERR=/tmp/tmp.CViTOwvMUK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iu155XzYia ++ cat /tmp/tmp.CViTOwvMUK ++ rm /tmp/tmp.iu155XzYia /tmp/tmp.CViTOwvMUK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X8SKE9xoUs +++ mktemp ++ local LAST_ERR=/tmp/tmp.4gxJFedIwF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.X8SKE9xoUs ++ cat /tmp/tmp.4gxJFedIwF ++ rm /tmp/tmp.X8SKE9xoUs /tmp/tmp.4gxJFedIwF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
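----------------------------------------------------------------------------------- note: what the secretsName patch does -----------------------------------------------------------------------------------
Switching spec.secretsName points the cluster at a different users Secret, so the operator re-reads the system-user passwords from my-cluster-secrets-2 and rolls the pods, which is why .status.state drops back to 'initializing' in the loop below. The patch itself is a plain merge patch; a quick way to confirm it landed (illustrative, not part of the test):

  kubectl patch pxc some-name --type merge \
      --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}'
  kubectl get pxc some-name -o jsonpath='{.spec.secretsName}'   # -> my-cluster-secrets-2
-----------------------------------------------------------------------------------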
[... wait-loop iterations 2 through 17 elided: each pass sleeps 5s, checks the retry counter against max=300, re-polls kubectl get pxc some-name -o 'jsonpath={.status.state}' through kubectl_bin with a fresh mktemp LAST_OUT/LAST_ERR pair, finds the state still 'initializing', and prints another '.' ...]
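----------------------------------------------------------------------------------- note: the wait_cluster_consistency polling pattern -----------------------------------------------------------------------------------
The elided iterations all follow one template. A sketch reconstructed from the trace (the real helper additionally resolves the proxy engine via .spec.haproxy.enabled / .spec.proxysql.enabled and compares the matching .status.<proxy>.ready count against the expected proxy size):

  wait_cluster_consistency() {
      local cluster=$1 size=$2
      local i=0 max=300
      sleep 7
      echo -n "waiting for pxc/$cluster to be ready"
      until [[ $(kubectl get pxc "$cluster" -o 'jsonpath={.status.state}') == "ready" ]]; do
          echo -n .
          sleep 5
          [[ $i -ge $max ]] && return 1   # ~25 minutes at 5s per retry
          let i+=1
      done
      [[ $(kubectl get pxc "$cluster" -o 'jsonpath={.status.pxc.ready}') == "$size" ]]
  }
-----------------------------------------------------------------------------------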
.+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xiRt6pjYuO +++ mktemp ++ local LAST_ERR=/tmp/tmp.njYMjRVLpE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xiRt6pjYuO ++ cat /tmp/tmp.njYMjRVLpE ++ rm /tmp/tmp.xiRt6pjYuO /tmp/tmp.njYMjRVLpE ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4v4QlIrT4y +++ mktemp ++ local LAST_ERR=/tmp/tmp.MxQead63QI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4v4QlIrT4y ++ cat /tmp/tmp.MxQead63QI ++ rm /tmp/tmp.4v4QlIrT4y /tmp/tmp.MxQead63QI ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.G8uIOg2gG0 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Y5NI3mdwZz +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.G8uIOg2gG0 +++++ cat /tmp/tmp.Y5NI3mdwZz +++++ rm /tmp/tmp.G8uIOg2gG0 /tmp/tmp.Y5NI3mdwZz +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.0b0Ytwffwj ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.g3Fu0Ah1IU +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.0b0Ytwffwj +++++ cat /tmp/tmp.g3Fu0Ah1IU +++++ rm /tmp/tmp.0b0Ytwffwj /tmp/tmp.g3Fu0Ah1IU +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w26eastuV1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.1DqVt995eQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.w26eastuV1 ++ cat /tmp/tmp.1DqVt995eQ ++ rm /tmp/tmp.w26eastuV1 /tmp/tmp.1DqVt995eQ ++ return 0 + [[ 2 == \2 ]] + echo + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.fpqqJ6BzuK ++ mktemp + local LAST_ERR=/tmp/tmp.W2n9rPMLDH + local exit_status=0 ++ 
seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.fpqqJ6BzuK secret/my-cluster-secrets-2 patched + cat /tmp/tmp.W2n9rPMLDH + rm /tmp/tmp.fpqqJ6BzuK /tmp/tmp.W2n9rPMLDH + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dkJRKqb7mB +++ mktemp ++ local LAST_ERR=/tmp/tmp.jCWrUXXoDV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dkJRKqb7mB ++ cat /tmp/tmp.jCWrUXXoDV ++ rm /tmp/tmp.dkJRKqb7mB /tmp/tmp.jCWrUXXoDV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Z46tfvUMPS +++ mktemp ++ local LAST_ERR=/tmp/tmp.3PEYUrnCtt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Z46tfvUMPS ++ cat /tmp/tmp.3PEYUrnCtt ++ rm /tmp/tmp.Z46tfvUMPS /tmp/tmp.3PEYUrnCtt ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rFqCnGNyME +++ mktemp ++ local LAST_ERR=/tmp/tmp.QISv4Nlh3e ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rFqCnGNyME ++ cat /tmp/tmp.QISv4Nlh3e ++ rm /tmp/tmp.rFqCnGNyME /tmp/tmp.QISv4Nlh3e ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.AsYIjXbBB6 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.QZIYrcefg0 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.AsYIjXbBB6 +++++ cat /tmp/tmp.QZIYrcefg0 +++++ rm /tmp/tmp.AsYIjXbBB6 /tmp/tmp.QZIYrcefg0 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.KdiTM0lpgB ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.HbXnus1ftt +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.KdiTM0lpgB +++++ cat /tmp/tmp.HbXnus1ftt +++++ rm /tmp/tmp.KdiTM0lpgB /tmp/tmp.HbXnus1ftt +++++ 
return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Iz5iTaFACE +++ mktemp ++ local LAST_ERR=/tmp/tmp.oLLzdwVqae ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Iz5iTaFACE ++ cat /tmp/tmp.oLLzdwVqae ++ rm /tmp/tmp.Iz5iTaFACE /tmp/tmp.oLLzdwVqae ++ return 0 + [[ 2 == \2 ]] + echo + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WRB6G9bLe8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.qrPxOCLVRm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WRB6G9bLe8 ++ cat /tmp/tmp.qrPxOCLVRm ++ rm /tmp/tmp.WRB6G9bLe8 /tmp/tmp.qrPxOCLVRm ++ return 0 + client_pod=pxc-client-59944c5bbf-mwkf4 + wait_pod pxc-client-59944c5bbf-mwkf4 + local pod=pxc-client-59944c5bbf-mwkf4 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mwkf4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mwkf4 condition met waiting for pod/pxc-client-59944c5bbf-mwkf4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.nLauKsh5Ah/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.nLauKsh5Ah/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' +++ mktemp ++ base64 --decode ++ local LAST_OUT=/tmp/tmp.uyEQHDa94h +++ mktemp ++ local LAST_ERR=/tmp/tmp.ivJROpAiYl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uyEQHDa94h ++ cat /tmp/tmp.ivJROpAiYl ++ rm /tmp/tmp.uyEQHDa94h /tmp/tmp.ivJROpAiYl ++ return 0 + newpass='1IqU}7Z^=YqD-vpihp' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''1IqU}7Z^=YqD-vpihp'\'';' '-h some-name-pxc -uroot -p'\''1IqU}7Z^=YqD-vpihp'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''1IqU}7Z^=YqD-vpihp'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''1IqU}7Z^=YqD-vpihp'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.riGoWt3no7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.o2x4FWlTnl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.riGoWt3no7 ++ cat /tmp/tmp.o2x4FWlTnl ++ rm /tmp/tmp.riGoWt3no7 /tmp/tmp.o2x4FWlTnl ++ return 0 + client_pod=pxc-client-59944c5bbf-mwkf4 + wait_pod pxc-client-59944c5bbf-mwkf4 + local pod=pxc-client-59944c5bbf-mwkf4 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mwkf4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mwkf4 condition met waiting for pod/pxc-client-59944c5bbf-mwkf4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''1IqU}7Z^=YqD-vpihp'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''1IqU}7Z^=YqD-vpihp'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''1IqU}7Z^=YqD-vpihp'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''1IqU}7Z^=YqD-vpihp'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2CrHzlJNFw +++ mktemp ++ local LAST_ERR=/tmp/tmp.AsRtWgvJ4C ++ local 
exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2CrHzlJNFw ++ cat /tmp/tmp.AsRtWgvJ4C ++ rm /tmp/tmp.2CrHzlJNFw /tmp/tmp.AsRtWgvJ4C ++ return 0 + client_pod=pxc-client-59944c5bbf-mwkf4 + wait_pod pxc-client-59944c5bbf-mwkf4 + local pod=pxc-client-59944c5bbf-mwkf4 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mwkf4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mwkf4 condition met waiting for pod/pxc-client-59944c5bbf-mwkf4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.nLauKsh5Ah/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.nLauKsh5Ah/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.nI0Pxc7lYM +++ mktemp ++ local LAST_ERR=/tmp/tmp.uw0HZvaZKu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nI0Pxc7lYM ++ cat /tmp/tmp.uw0HZvaZKu ++ rm /tmp/tmp.nI0Pxc7lYM /tmp/tmp.uw0HZvaZKu ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.EdNrksAXnl ++ mktemp + local LAST_ERR=/tmp/tmp.SDGRYOaIXr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.EdNrksAXnl secret/my-cluster-secrets-2 configured + cat /tmp/tmp.SDGRYOaIXr Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
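----------------------------------------------------------------------------------- note: the last-applied-configuration warning -----------------------------------------------------------------------------------
The warning is expected here: my-cluster-secrets-2 was created outside of kubectl apply, so it lacks the annotation kubectl apply uses for its three-way merge. kubectl patches the annotation in automatically, so the test can ignore it. To avoid the warning when you own the resource lifecycle, create the object declaratively from the start:

  kubectl create --save-config -f secrets.yml   # records last-applied-configuration
  # or manage the object with kubectl apply from its first creation onward
-----------------------------------------------------------------------------------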
+ rm /tmp/tmp.EdNrksAXnl /tmp/tmp.SDGRYOaIXr + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZNnYuLEqv1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.P8sHM72W4K ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZNnYuLEqv1 ++ cat /tmp/tmp.P8sHM72W4K ++ rm /tmp/tmp.ZNnYuLEqv1 /tmp/tmp.P8sHM72W4K ++ return 0 + client_pod=pxc-client-59944c5bbf-mwkf4 + wait_pod pxc-client-59944c5bbf-mwkf4 + local pod=pxc-client-59944c5bbf-mwkf4 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mwkf4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mwkf4 condition met waiting for pod/pxc-client-59944c5bbf-mwkf4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.nLauKsh5Ah/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.nLauKsh5Ah/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2265-30ece6d6#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + local LAST_OUT=/tmp/tmp.b2Rl0sLum0 + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: percona/proxysql3:3.0#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-8513~ ++ mktemp + /usr/bin/sed -e 's#apply:.*#apply: Never#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/conf/some-name.yml + local LAST_ERR=/tmp/tmp.vmHWi9yOKW + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.b2Rl0sLum0 perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.vmHWi9yOKW + rm /tmp/tmp.b2Rl0sLum0 /tmp/tmp.vmHWi9yOKW + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sOh512IIrK +++ mktemp ++ local LAST_ERR=/tmp/tmp.63oaCnVAYX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sOh512IIrK ++ cat /tmp/tmp.63oaCnVAYX ++ rm /tmp/tmp.sOh512IIrK /tmp/tmp.63oaCnVAYX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
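----------------------------------------------------------------------------------- note: what cat_config / apply_config do -----------------------------------------------------------------------------------
apply_config streams the CR manifest through a chain of sed substitutions so the test runs against the images built for this PR: the init image becomes perconalab/percona-xtradb-cluster-operator:PR-2265-30ece6d6, the pxc/backup/haproxy/logcollector images get main-* tags, proxysql is pinned to percona/proxysql3:3.0, apiVersion is normalized to pxc.percona.com/v1, and the minio-service placeholder gets the test namespace (users-8513). A condensed sketch of the idea (the variable names are illustrative; the trace shows the literal tags being substituted):

  cat_config() {
      cat "$1" \
          | sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
          | sed -e "s#image:.*-init\$#image: ${INIT_IMAGE}#" \
          | sed -e "s#image:.*-proxysql\$#image: ${PROXY_IMAGE}#" \
          | sed -e "s~minio-service.#namespace~minio-service.${NAMESPACE}~"
  }
  apply_config() { cat_config "$1" | kubectl apply -f -; }

The applied some-name.yml evidently raises the ProxySQL size to 3, hence wait_cluster_consistency some-name 3 3 and the long 'initializing' stretch below.
-----------------------------------------------------------------------------------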
[... wait-loop iterations 0 through 29 elided: identical pattern, 5s sleep per pass, re-poll of kubectl get pxc some-name -o 'jsonpath={.status.state}' through kubectl_bin, state remains 'initializing' throughout this span ...]
.+ sleep 5 + [[ 30 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VaPxi3GCvf +++ mktemp ++ local LAST_ERR=/tmp/tmp.mkxgCdcLKT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VaPxi3GCvf ++ cat /tmp/tmp.mkxgCdcLKT ++ rm /tmp/tmp.VaPxi3GCvf /tmp/tmp.mkxgCdcLKT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 31 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.U8njYv0tPm +++ mktemp ++ local LAST_ERR=/tmp/tmp.eANIQXZrbL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.U8njYv0tPm ++ cat /tmp/tmp.eANIQXZrbL ++ rm /tmp/tmp.U8njYv0tPm /tmp/tmp.eANIQXZrbL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 32 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mys8fd0Yu5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.QSqvXkUs8c ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mys8fd0Yu5 ++ cat /tmp/tmp.QSqvXkUs8c ++ rm /tmp/tmp.mys8fd0Yu5 /tmp/tmp.QSqvXkUs8c ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 33 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.34xrrNngFb +++ mktemp ++ local LAST_ERR=/tmp/tmp.C7cS0LnE03 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.34xrrNngFb ++ cat /tmp/tmp.C7cS0LnE03 ++ rm /tmp/tmp.34xrrNngFb /tmp/tmp.C7cS0LnE03 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 34 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SFK7m1hU8T +++ mktemp ++ local LAST_ERR=/tmp/tmp.HYNxf3ZDsp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SFK7m1hU8T ++ cat /tmp/tmp.HYNxf3ZDsp ++ rm /tmp/tmp.SFK7m1hU8T /tmp/tmp.HYNxf3ZDsp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 35 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BZ6kFt88x5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.gZ2tBNAYQn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BZ6kFt88x5 ++ cat /tmp/tmp.gZ2tBNAYQn ++ rm /tmp/tmp.BZ6kFt88x5 /tmp/tmp.gZ2tBNAYQn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 36 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ji2WkFCr9m +++ mktemp ++ local LAST_ERR=/tmp/tmp.wQvWMJhOpc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ji2WkFCr9m ++ cat /tmp/tmp.wQvWMJhOpc ++ rm /tmp/tmp.ji2WkFCr9m /tmp/tmp.wQvWMJhOpc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 37 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PO6BqPYpjp +++ mktemp ++ local LAST_ERR=/tmp/tmp.LLclfVzyaf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PO6BqPYpjp ++ cat /tmp/tmp.LLclfVzyaf ++ rm /tmp/tmp.PO6BqPYpjp /tmp/tmp.LLclfVzyaf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 38 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bKaPHkcvwY +++ mktemp ++ local LAST_ERR=/tmp/tmp.YKEeyfS04A ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bKaPHkcvwY ++ cat /tmp/tmp.YKEeyfS04A ++ rm /tmp/tmp.bKaPHkcvwY /tmp/tmp.YKEeyfS04A ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 39 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.D1hCZmB1HW +++ mktemp ++ local LAST_ERR=/tmp/tmp.PxQ1qrt9AB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.D1hCZmB1HW ++ cat /tmp/tmp.PxQ1qrt9AB ++ rm /tmp/tmp.D1hCZmB1HW /tmp/tmp.PxQ1qrt9AB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 40 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Y7Ps8DEvor +++ mktemp ++ local LAST_ERR=/tmp/tmp.SKQgYjBUwa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Y7Ps8DEvor ++ cat /tmp/tmp.SKQgYjBUwa ++ rm /tmp/tmp.Y7Ps8DEvor /tmp/tmp.SKQgYjBUwa ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 41 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eeDXn9PSMY +++ mktemp ++ local LAST_ERR=/tmp/tmp.UPd4Uwywds ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eeDXn9PSMY ++ cat /tmp/tmp.UPd4Uwywds ++ rm /tmp/tmp.eeDXn9PSMY /tmp/tmp.UPd4Uwywds ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 42 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mdO2kz5YQs +++ mktemp ++ local LAST_ERR=/tmp/tmp.28WTQUYjUy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mdO2kz5YQs ++ cat /tmp/tmp.28WTQUYjUy ++ rm /tmp/tmp.mdO2kz5YQs /tmp/tmp.28WTQUYjUy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 43 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.znPaoBnV0m +++ mktemp ++ local LAST_ERR=/tmp/tmp.U2nx0Lc6iv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.znPaoBnV0m ++ cat /tmp/tmp.U2nx0Lc6iv ++ rm /tmp/tmp.znPaoBnV0m /tmp/tmp.U2nx0Lc6iv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 44 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eskhd5yimo +++ mktemp ++ local LAST_ERR=/tmp/tmp.RGteYEcJzW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eskhd5yimo ++ cat /tmp/tmp.RGteYEcJzW ++ rm /tmp/tmp.eskhd5yimo /tmp/tmp.RGteYEcJzW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 45 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GajBlFykL1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.DOXiAYG2Ar ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GajBlFykL1 ++ cat /tmp/tmp.DOXiAYG2Ar ++ rm /tmp/tmp.GajBlFykL1 /tmp/tmp.DOXiAYG2Ar ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 46 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8xNJzFZBDN +++ mktemp ++ local LAST_ERR=/tmp/tmp.mKklyZQ5PE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8xNJzFZBDN ++ cat /tmp/tmp.mKklyZQ5PE ++ rm /tmp/tmp.8xNJzFZBDN /tmp/tmp.mKklyZQ5PE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 47 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aL838Spkos +++ mktemp ++ local LAST_ERR=/tmp/tmp.nI1U5Qb1AV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aL838Spkos ++ cat /tmp/tmp.nI1U5Qb1AV ++ rm /tmp/tmp.aL838Spkos /tmp/tmp.nI1U5Qb1AV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
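The loop traced above is easier to read in source form. A minimal sketch reconstructed from the trace (the real helpers live in the test suite's shared functions; the retry count, temp-file handling, and 300-step bound are taken from the trace, while the pause between failed attempts is an assumption):

kubectl_bin() {
    # Run kubectl up to three times ('seq 0 2' in the trace), capturing
    # stdout/stderr into throwaway mktemp files, then replay and clean up.
    local LAST_OUT LAST_ERR exit_status=0
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ $exit_status != 0 ]; then
            sleep 1   # pause before retrying (assumed; the trace only shows first-try successes)
            continue
        fi
        break
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR"
    rm "$LAST_OUT" "$LAST_ERR"
    return $exit_status
}

# The surrounding wait loop: probe .status.state every 5s and give up
# after 300 iterations (~25 minutes at 5s per probe).
i=0
while true; do
    state=$(kubectl_bin get pxc some-name -o 'jsonpath={.status.state}')
    [[ $state == "ready" ]] && break
    echo -n .
    sleep 5
    [[ $i -ge 300 ]] && exit 1
    let i+=1
done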
.+ sleep 5 + [[ 48 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5LrRNt0DpM +++ mktemp ++ local LAST_ERR=/tmp/tmp.HriiFiRW4h ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5LrRNt0DpM ++ cat /tmp/tmp.HriiFiRW4h ++ rm /tmp/tmp.5LrRNt0DpM /tmp/tmp.HriiFiRW4h ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8pnmhgVBnS +++ mktemp ++ local LAST_ERR=/tmp/tmp.zuDYuKIjgY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8pnmhgVBnS ++ cat /tmp/tmp.zuDYuKIjgY ++ rm /tmp/tmp.8pnmhgVBnS /tmp/tmp.zuDYuKIjgY ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.wHHhA6DPc4 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.htnOzV4GBq +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.wHHhA6DPc4 +++++ cat /tmp/tmp.htnOzV4GBq +++++ rm /tmp/tmp.wHHhA6DPc4 /tmp/tmp.htnOzV4GBq +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yxw5qcrwTu +++ mktemp ++ local LAST_ERR=/tmp/tmp.9j1aCZCupu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yxw5qcrwTu ++ cat /tmp/tmp.9j1aCZCupu ++ rm /tmp/tmp.yxw5qcrwTu /tmp/tmp.9j1aCZCupu ++ return 0 + [[ 3 == \3 ]] + echo + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DhS9uOLsu1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.qc3WWIQxbN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DhS9uOLsu1 ++ cat /tmp/tmp.qc3WWIQxbN ++ rm /tmp/tmp.DhS9uOLsu1 /tmp/tmp.qc3WWIQxbN ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.qwP7aNEecm ++ mktemp + local LAST_ERR=/tmp/tmp.uucTZBrymy + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 
+ set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qwP7aNEecm secret/my-cluster-secrets patched + cat /tmp/tmp.uucTZBrymy + rm /tmp/tmp.qwP7aNEecm /tmp/tmp.uucTZBrymy + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pgqFebZYgK +++ mktemp ++ local LAST_ERR=/tmp/tmp.O3G5rKYzFK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pgqFebZYgK ++ cat /tmp/tmp.O3G5rKYzFK ++ rm /tmp/tmp.pgqFebZYgK /tmp/tmp.O3G5rKYzFK ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.G4rF7vEbMm +++ mktemp ++ local LAST_ERR=/tmp/tmp.3PIj3JzxkD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.G4rF7vEbMm ++ cat /tmp/tmp.3PIj3JzxkD ++ rm /tmp/tmp.G4rF7vEbMm /tmp/tmp.3PIj3JzxkD ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Rcl7jR19lT ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.96783eNJVI +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Rcl7jR19lT +++++ cat /tmp/tmp.96783eNJVI +++++ rm /tmp/tmp.Rcl7jR19lT /tmp/tmp.96783eNJVI +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rtnLctegos +++ mktemp ++ local LAST_ERR=/tmp/tmp.v4cYwJspP7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rtnLctegos ++ cat /tmp/tmp.v4cYwJspP7 ++ rm /tmp/tmp.rtnLctegos /tmp/tmp.v4cYwJspP7 ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-3-80.sql ]] + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UzNlNQpRKe +++ mktemp ++ local LAST_ERR=/tmp/tmp.1YroN3KVUv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UzNlNQpRKe ++ cat /tmp/tmp.1YroN3KVUv ++ rm /tmp/tmp.UzNlNQpRKe /tmp/tmp.1YroN3KVUv ++ return 0 + client_pod=pxc-client-59944c5bbf-mwkf4 + wait_pod pxc-client-59944c5bbf-mwkf4 + local pod=pxc-client-59944c5bbf-mwkf4 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-mwkf4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-mwkf4 condition met waiting for pod/pxc-client-59944c5bbf-mwkf4 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.nLauKsh5Ah/select-3.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2265/e2e-tests/users/compare/select-3.sql /tmp/tmp.nLauKsh5Ah/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KscyqrbyvE +++ mktemp ++ local LAST_ERR=/tmp/tmp.2uu3yCLhG8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KscyqrbyvE ++ cat /tmp/tmp.2uu3yCLhG8 ++ rm /tmp/tmp.KscyqrbyvE /tmp/tmp.2uu3yCLhG8 ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + destroy users-8513 + local namespace=users-8513 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v 'get backup status: Job.batch' + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + grep -v 'the object has been modified' + sort -u + grep -v level=info + tee /tmp/tmp.nLauKsh5Ah/operator.log ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.71RvLrTpci +++ mktemp ++ local LAST_ERR=/tmp/tmp.jx0iCZqgiO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ 
exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.71RvLrTpci ++ cat /tmp/tmp.jx0iCZqgiO ++ rm /tmp/tmp.71RvLrTpci /tmp/tmp.jx0iCZqgiO ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-8547dbc67b-4l2tb ++ mktemp + local LAST_OUT=/tmp/tmp.yg7KIWYTSJ ++ mktemp + local LAST_ERR=/tmp/tmp.1tcrLHn8vt + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-8547dbc67b-4l2tb + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.yg7KIWYTSJ + cat /tmp/tmp.1tcrLHn8vt + rm /tmp/tmp.yg7KIWYTSJ /tmp/tmp.1tcrLHn8vt + return 0 2025-12-01T14:22:22.860Z INFO setup Manager starting up {"gitCommit": "30ece6d68dd5f91454c544aebee0e958f8e0dbeb", "gitBranch": "PR-2265-30ece6d6", "buildTime": "2025-12-01T11:50:55Z", "goVersion": "go1.25.4", "os": "linux", "arch": "amd64"} 2025-12-01T14:22:22.860Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.14-gke.1033000"} 2025-12-01T14:22:22.863Z INFO setup Registering Components. 2025-12-01T14:22:24.067Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-12-01T14:22:24.067Z INFO setup Starting the Cmd. 2025-12-01T14:22:24.068Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-12-01T14:22:24.068Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-12-01T14:22:24.068Z INFO controller-runtime.metrics Starting metrics server 2025-12-01T14:22:24.068Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-12-01T14:22:24.068Z INFO controller-runtime.webhook Starting webhook server 2025-12-01T14:22:24.068Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-12-01T14:22:24.069Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-12-01T14:22:24.169Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 
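Just before this dump, the destroy step resolves the operator pod name and writes a sanitized copy of its log. Pieced together from the trace into one runnable form (the label selector, grep filters, sed expression, and output path are verbatim from the trace; the pipeline stage order and the function packaging are assumptions, since xtrace does not preserve pipe order):

get_operator_pod() {
    # The trace first checks that the app.kubernetes.io/name label matches
    # a pod, then fetches that pod's name.
    kubectl get pods \
        --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator \
        -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
}

# Drop known-noisy messages, strip numeric "ts" fields and limits-N.N/ prefixes,
# deduplicate, and keep a copy of the sanitized log next to the test artifacts.
kubectl logs -n pxc-operator "$(get_operator_pod)" \
    | grep -v 'get backup status: Job.batch' \
    | grep -v 'the object has been modified' \
    | grep -v level=info \
    | /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
    | sort -u \
    | tee /tmp/tmp.nLauKsh5Ah/operator.log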
2025-12-01T14:22:24.209Z DEBUG events percona-xtradb-cluster-operator-8547dbc67b-4l2tb_f3f2b6fd-9c89-41cc-aaa6-b15938b0fe2c became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"2c200495-a4f9-482a-af55-3befbe4701cc","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1764598944203391009"}, "reason": "LeaderElection"} 2025-12-01T14:22:24.209Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-12-01T14:22:24.209Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-12-01T14:22:24.209Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.Secret"} 2025-12-01T14:22:24.209Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-12-01T14:22:24.209Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2025-12-01T14:22:24.310Z INFO Starting Controller {"controller": "pxc-controller"} 2025-12-01T14:22:24.310Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2025-12-01T14:22:24.310Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2025-12-01T14:22:24.310Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2025-12-01T14:22:24.410Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2025-12-01T14:22:24.410Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2025-12-01T14:22:56.265Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "88eb00ae-10a6-49ae-9a44-676c2ff3b0cb", "version": "1.19.0"} 2025-12-01T14:22:56.421Z INFO User secrets updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "88eb00ae-10a6-49ae-9a44-676c2ff3b0cb", "secrets": "my-cluster-secrets"} 2025-12-01T14:22:56.637Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "88eb00ae-10a6-49ae-9a44-676c2ff3b0cb", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-12-01T14:22:56.753Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "88eb00ae-10a6-49ae-9a44-676c2ff3b0cb", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-12-01T14:22:56.785Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "88eb00ae-10a6-49ae-9a44-676c2ff3b0cb", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-12-01T14:22:56.832Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "88eb00ae-10a6-49ae-9a44-676c2ff3b0cb", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-01T14:22:56.871Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "88eb00ae-10a6-49ae-9a44-676c2ff3b0cb", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-01T14:22:56.957Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "88eb00ae-10a6-49ae-9a44-676c2ff3b0cb", 
"object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-01T14:22:57.057Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "88eb00ae-10a6-49ae-9a44-676c2ff3b0cb", "object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-01T14:22:57.899Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "049ee5df-344a-4f17-8b1c-463576e751d1", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-12-01T14:22:57.920Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "049ee5df-344a-4f17-8b1c-463576e751d1", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-12-01T14:24:09.014Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "a4424661-ed60-4182-9152-885bbce54ef8", "user": "operator"} 2025-12-01T14:24:09.044Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "a4424661-ed60-4182-9152-885bbce54ef8", "user": "monitor"} 2025-12-01T14:24:09.090Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "a4424661-ed60-4182-9152-885bbce54ef8"} 2025-12-01T14:24:09.122Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "a4424661-ed60-4182-9152-885bbce54ef8"} 2025-12-01T14:24:09.154Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "a4424661-ed60-4182-9152-885bbce54ef8", "user": "xtrabackup"} 2025-12-01T14:24:09.189Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "a4424661-ed60-4182-9152-885bbce54ef8"} 2025-12-01T14:24:09.221Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "a4424661-ed60-4182-9152-885bbce54ef8", "user": "replication"} 2025-12-01T14:24:09.231Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "a4424661-ed60-4182-9152-885bbce54ef8", "err": "get primary pxc pod: not found"} 2025-12-01T14:24:13.977Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "3e28a4c3-2e7c-4a04-906d-2623ab0680d3", "err": "get primary pxc pod: not found"} 2025-12-01T14:24:19.163Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "10aaa65c-01ca-4126-be5f-862478fd5b15", "err": "get primary pxc pod: not found"} 2025-12-01T14:24:24.291Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "fb61f05f-cb3c-4d04-813a-f7d292c963e7", "err": "get primary pxc pod: not found"} 2025-12-01T14:26:35.366Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "8ecc5c2d-bbc6-4572-bd0f-f0ec82122c44", "user": 
"root"} 2025-12-01T14:26:35.499Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "8ecc5c2d-bbc6-4572-bd0f-f0ec82122c44", "new version": "8.0.43-34.1"} 2025-12-01T14:26:37.017Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "8ecc5c2d-bbc6-4572-bd0f-f0ec82122c44"} 2025-12-01T14:26:42.804Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "b827c7cd-a8f3-4a24-90e3-ebb31525b2ec"} 2025-12-01T14:26:47.993Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "aad3e42f-8f4a-4448-b2eb-3bc08bd109e6"} 2025-12-01T14:26:53.434Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "5385ba34-af78-4201-b99d-49439fc1fa10"} 2025-12-01T14:26:58.707Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "9502ae7a-51d8-4d2e-af8d-3e2966875aa8"} 2025-12-01T14:27:03.999Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "d0cb339f-7d4f-4983-afed-8cdd6cc9d1ca"} 2025-12-01T14:27:09.295Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "9ceba426-e9cc-4803-9674-546fb6d71977"} 2025-12-01T14:27:14.631Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "21846921-f8cb-49c9-8d50-66d55cf24049"} 2025-12-01T14:27:19.697Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "2ea43816-70a4-416b-802f-2ac7fb89d8f7"} 2025-12-01T14:27:24.920Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "27bb6c27-aef7-4012-8a37-4bfae891c96d"} 2025-12-01T14:27:30.424Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "ec74c3c8-683e-4e5d-afce-336332335452"} 2025-12-01T14:27:35.829Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "d5db2a3c-fe9d-4c63-91e9-5a088f4c5a58"} 2025-12-01T14:27:41.815Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "ebe89a2f-b3a1-40a2-b70d-5884d67c6504"} 2025-12-01T14:27:46.130Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "8107ebed-d97f-4a0a-8916-444f5ad9865b"} 2025-12-01T14:27:51.631Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "04a895f4-06fb-407a-9362-1a6ec2192bab"} 2025-12-01T14:27:57.386Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "73dc62bf-12c2-479c-a561-efd77e6405c2"} 2025-12-01T14:28:02.790Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", 
"reconcileID": "90cdfe93-15d5-4bf3-a64f-8e91ca4c8594"} 2025-12-01T14:28:08.300Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f6d10107-190e-4056-a42b-f0fb6fbcfa9d"} 2025-12-01T14:28:13.125Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "9660d6a1-23c4-4903-a027-82a3bd2229cb", "user": "root"} 2025-12-01T14:28:13.153Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "9660d6a1-23c4-4903-a027-82a3bd2229cb", "user": "root"} 2025-12-01T14:28:13.171Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "9660d6a1-23c4-4903-a027-82a3bd2229cb", "secret": "some-name-mysql-init", "user": "root"} 2025-12-01T14:28:13.927Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "22877f61-f711-45f1-9d70-c8c332d4338c"} 2025-12-01T14:28:15.313Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "9660d6a1-23c4-4903-a027-82a3bd2229cb"} 2025-12-01T14:28:15.332Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "9660d6a1-23c4-4903-a027-82a3bd2229cb", "user": "root"} 2025-12-01T14:28:15.354Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "9660d6a1-23c4-4903-a027-82a3bd2229cb", "user": "root"} 2025-12-01T14:28:17.190Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "9660d6a1-23c4-4903-a027-82a3bd2229cb"} 2025-12-01T14:28:24.090Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "37f2f357-505a-456b-82ab-c190fa3aa5fe"} 2025-12-01T14:28:29.386Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "4dd5f512-3aa4-4366-b931-ba7c6dac90e8"} 2025-12-01T14:28:32.430Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "8c1b34f1-0312-4332-ab7b-373dd655b35b", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-01T14:28:32.503Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "8c1b34f1-0312-4332-ab7b-373dd655b35b", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-01T14:28:35.254Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "8c1b34f1-0312-4332-ab7b-373dd655b35b", "error": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / 
\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-01T14:28:49.867Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "dda0fcb3-29f8-4e22-ad36-287113e92861", "err": "get primary pxc pod: not found"} 2025-12-01T14:28:53.515Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "dda0fcb3-29f8-4e22-ad36-287113e92861", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-01T14:28:57.795Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "342633a6-81e0-489e-af81-d1bf876f6a4c", "user": "proxyadmin"} 2025-12-01T14:28:57.795Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "342633a6-81e0-489e-af81-d1bf876f6a4c", "user": "proxyadmin"} 2025-12-01T14:28:57.833Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "342633a6-81e0-489e-af81-d1bf876f6a4c", "user": "proxyadmin"} 2025-12-01T14:28:57.852Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "342633a6-81e0-489e-af81-d1bf876f6a4c", "user": "proxyadmin"} 2025-12-01T14:28:57.852Z INFO Proxy pods will be restarted 
{"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "342633a6-81e0-489e-af81-d1bf876f6a4c", "last-applied-secret": "82c5c699c3c0c035f313eeaf9d92f33737d808cce12fce0851efcf31432d8138"} 2025-12-01T14:28:57.856Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "342633a6-81e0-489e-af81-d1bf876f6a4c", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-01T14:28:58.686Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "fbbb5c85-97c2-47fe-8d45-6bc34e20375b", "error": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-2\" not found / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-2\" not found / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-01T14:29:29.914Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "b9e68340-a546-4829-a6e4-219634a56b39"} 2025-12-01T14:29:31.830Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29af413a-1b8c-484f-a2c6-c18bc157ada2", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-01T14:29:31.883Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29af413a-1b8c-484f-a2c6-c18bc157ada2", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-01T14:29:41.746Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "32eb8703-325c-48d5-9d8f-d24986ba3394", "user": "xtrabackup"} 2025-12-01T14:29:41.764Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "32eb8703-325c-48d5-9d8f-d24986ba3394", "user": "xtrabackup"} 2025-12-01T14:29:41.788Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "32eb8703-325c-48d5-9d8f-d24986ba3394", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-12-01T14:29:41.830Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "32eb8703-325c-48d5-9d8f-d24986ba3394", "user": 
"xtrabackup"} 2025-12-01T14:29:41.844Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "32eb8703-325c-48d5-9d8f-d24986ba3394", "user": "xtrabackup"} 2025-12-01T14:29:41.852Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "32eb8703-325c-48d5-9d8f-d24986ba3394", "last-applied-secret": "c8bda7e3f2f0e73b03380c6aef4ab247c801f67a87718970af1063d0a3eaa1ab"} 2025-12-01T14:29:41.855Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "32eb8703-325c-48d5-9d8f-d24986ba3394", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-01T14:29:44.538Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "32eb8703-325c-48d5-9d8f-d24986ba3394"} 2025-12-01T14:29:47.014Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "32eb8703-325c-48d5-9d8f-d24986ba3394", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-2: dial tcp: lookup some-name-pxc-2.some-name-pxc.users-8513 on 34.118.224.10:53: no such host"} 2025-12-01T14:30:40.052Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "a73d13bd-e2d0-4fb6-84f1-11673c7b422f", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-8513 on 34.118.224.10:53: no such host"} 2025-12-01T14:30:46.531Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "23d658d4-f45f-49c0-a54a-75334fbb0fe7", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.239.82.49:33062: connect: connection refused"} 2025-12-01T14:31:33.360Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "62ad12bd-7079-4531-ac2b-473ddc7de3ae", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-8513 on 34.118.224.10:53: no such host"} 2025-12-01T14:31:38.646Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "7304b9b5-213a-4e32-84df-7a4502a0a0b1", "err": "failed to connect to pod some-name-pxc-0: invalid connection"} 2025-12-01T14:31:54.123Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "51180538-07b2-4d71-8fcc-367148402e81", "primary name": "some-name-pxc-0.some-name-pxc.users-8513.svc.cluster.local"}
2025-12-01T14:32:17.573Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "e02f0ad9-fbf4-4fa3-95df-3b08a9df2d3b"}
2025-12-01T14:32:22.205Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "0f3e1923-5f81-4e56-a9ef-4b921f5b2362", "user": "monitor"}
2025-12-01T14:32:22.222Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "0f3e1923-5f81-4e56-a9ef-4b921f5b2362", "user": "monitor"}
2025-12-01T14:32:22.241Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "0f3e1923-5f81-4e56-a9ef-4b921f5b2362", "secret": "some-name-mysql-init", "user": "monitor"}
2025-12-01T14:32:22.265Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "0f3e1923-5f81-4e56-a9ef-4b921f5b2362", "user": "monitor"}
2025-12-01T14:32:22.282Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "0f3e1923-5f81-4e56-a9ef-4b921f5b2362", "user": "monitor"}
2025-12-01T14:32:22.366Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "07394dd4-624b-4760-85c4-9d26f60f180e"}
2025-12-01T14:32:22.579Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "0f3e1923-5f81-4e56-a9ef-4b921f5b2362", "last-applied-secret": "976122f713107b83f1f8b1115d3260bac4380b593f7dd3090b0d7752b9b58898"}
2025-12-01T14:32:22.585Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "0f3e1923-5f81-4e56-a9ef-4b921f5b2362", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-12-01T14:32:25.014Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "0f3e1923-5f81-4e56-a9ef-4b921f5b2362", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-12-01T14:33:03.997Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "a2a958ee-45da-4878-8af9-846f94988913", "user": "monitor"}
2025-12-01T14:33:05.973Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "a2a958ee-45da-4878-8af9-846f94988913"}
2025-12-01T14:33:08.984Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "d7ed024a-0fbc-4c69-8e18-d18608eadead", "user": "monitor"}
2025-12-01T14:33:11.269Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "d7ed024a-0fbc-4c69-8e18-d18608eadead"}
2025-12-01T14:33:14.551Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "2f5987c2-55a3-436e-8634-69d3856b7645", "user": "monitor"}
2025-12-01T14:33:16.285Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "2f5987c2-55a3-436e-8634-69d3856b7645"}
2025-12-01T14:33:19.890Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "75af53df-19b7-47d0-8ef9-a6da2777e865", "user": "monitor"}
2025-12-01T14:33:21.810Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "75af53df-19b7-47d0-8ef9-a6da2777e865"}
2025-12-01T14:33:25.588Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "8f2cfeee-66c6-4ea0-a267-f7f6cc605900", "user": "monitor"}
2025-12-01T14:33:28.010Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "8f2cfeee-66c6-4ea0-a267-f7f6cc605900"}
2025-12-01T14:33:31.298Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "b7fe6964-c28d-4cbf-9b88-0e56d06c8904", "user": "monitor"}
2025-12-01T14:33:31.941Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "b7fe6964-c28d-4cbf-9b88-0e56d06c8904", "user": "monitor"}
2025-12-01T14:33:31.957Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "b7fe6964-c28d-4cbf-9b88-0e56d06c8904", "last-applied-secret": "976122f713107b83f1f8b1115d3260bac4380b593f7dd3090b0d7752b9b58898"}
2025-12-01T14:33:33.596Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "b7fe6964-c28d-4cbf-9b88-0e56d06c8904"}
2025-12-01T14:33:39.217Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "cc06996b-fadb-4346-ac36-e7a0c436c153"}
2025-12-01T14:33:44.383Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "3cdab7d6-5da9-43bd-8373-05464fec3928"}
2025-12-01T14:33:49.362Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "425f9bfb-fe7d-4cb3-b92b-c086c2c40bd7"}
2025-12-01T14:33:54.799Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "689c0185-281a-4fcb-9a37-4ef1e1c47fd5"}
2025-12-01T14:33:59.484Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "a7cd413e-7df9-4e95-a61a-3b8aab3addb7", "user": "operator"}
2025-12-01T14:33:59.501Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "a7cd413e-7df9-4e95-a61a-3b8aab3addb7", "user": "operator"}
2025-12-01T14:33:59.518Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "a7cd413e-7df9-4e95-a61a-3b8aab3addb7", "secret": "some-name-mysql-init", "user": "operator"}
2025-12-01T14:33:59.536Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "a7cd413e-7df9-4e95-a61a-3b8aab3addb7", "user": "operator"}
2025-12-01T14:33:59.553Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "a7cd413e-7df9-4e95-a61a-3b8aab3addb7", "user": "operator"}
2025-12-01T14:33:59.578Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "a7cd413e-7df9-4e95-a61a-3b8aab3addb7", "last-applied-secret": "84f721c477fd0badc79d6dcf0aa001df2edd6fbaa41029324b771dc166604661"}
2025-12-01T14:33:59.585Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "a7cd413e-7df9-4e95-a61a-3b8aab3addb7", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-12-01T14:34:00.690Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4a17d69-df19-4b3c-a468-92a2e0405525", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-12-01T14:34:48.642Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "8453bae1-9785-44cd-9958-03f92f52c307"}
2025-12-01T14:34:52.750Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "a96a1133-dc9e-4a19-975f-cdf6adfbb782"}
2025-12-01T14:34:58.104Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "17d02178-c6b5-4919-b32f-fbf3e0c20283"}
2025-12-01T14:35:03.537Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "9a9f3715-827a-4f22-aed6-c39b44a9e17f"}
2025-12-01T14:35:04.254Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "secrets": "my-cluster-secrets-2"}
2025-12-01T14:35:04.261Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "user": "root"}
2025-12-01T14:35:04.286Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "user": "root"}
2025-12-01T14:35:04.309Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "secret": "some-name-mysql-init", "user": "root"}
2025-12-01T14:35:06.819Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d"}
2025-12-01T14:35:06.842Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "user": "root"}
2025-12-01T14:35:06.864Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "user": "root"}
2025-12-01T14:35:06.872Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "user": "operator"}
2025-12-01T14:35:06.889Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "user": "operator"}
2025-12-01T14:35:06.908Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "secret": "some-name-mysql-init", "user": "operator"}
2025-12-01T14:35:06.934Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "user": "operator"}
2025-12-01T14:35:06.952Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "user": "operator"}
2025-12-01T14:35:06.961Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "user": "monitor"}
2025-12-01T14:35:06.978Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "user": "monitor"}
2025-12-01T14:35:06.997Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "secret": "some-name-mysql-init", "user": "monitor"}
2025-12-01T14:35:07.019Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "user": "monitor"}
2025-12-01T14:35:07.037Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "user": "monitor"}
2025-12-01T14:35:07.322Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "user": "xtrabackup"}
2025-12-01T14:35:07.339Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "user": "xtrabackup"}
2025-12-01T14:35:07.359Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2025-12-01T14:35:07.381Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "user": "xtrabackup"}
2025-12-01T14:35:07.396Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "user": "xtrabackup"}
2025-12-01T14:35:07.402Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "user": "replication"}
2025-12-01T14:35:07.417Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "user": "replication"}
2025-12-01T14:35:07.436Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "secret": "some-name-mysql-init", "user": "replication"}
2025-12-01T14:35:07.453Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "user": "replication"}
2025-12-01T14:35:07.469Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "user": "replication"}
2025-12-01T14:35:07.469Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "user": "proxyadmin"}
2025-12-01T14:35:07.491Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "user": "proxyadmin"}
2025-12-01T14:35:07.509Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "user": "proxyadmin"}
2025-12-01T14:35:07.509Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "last-applied-secret": "24ac5519f1f220e885b02097d9a70808827810df8a11ef293bfdbc0c75b63079"}
2025-12-01T14:35:07.509Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "last-applied-secret": "24ac5519f1f220e885b02097d9a70808827810df8a11ef293bfdbc0c75b63079"}
2025-12-01T14:35:07.512Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-12-01T14:35:07.560Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-12-01T14:35:09.258Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f4138f0b-b17c-4b97-8a7d-ff1cf817b48d", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-12-01T14:36:10.822Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "eaa9df72-2352-4831-b1c9-c49723cd26c2", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-8513 on 34.118.224.10:53: no such host"}
2025-12-01T14:36:47.195Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "c8917ed8-25d7-4bf9-8868-23771dd0f34d", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-8513 on 34.118.224.10:53: no such host"}
2025-12-01T14:36:47.528Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "0de7f436-c23c-44bd-aea1-9a6ea1ab4651", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-8513 on 34.118.224.10:53: no such host"}
2025-12-01T14:36:52.502Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "8af51665-be39-4231-98a3-529387a9d780", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-8513 on 34.118.224.10:53: no such host"}
2025-12-01T14:36:57.698Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "11dce7c5-7fa4-4be5-8be1-2c1242bd6e39", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.239.80.50:33062: connect: connection refused"}
2025-12-01T14:37:02.867Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "0b10434b-08cc-4b8f-9190-8d240a13c0d0", "primary name": "some-name-pxc-0.some-name-pxc.users-8513.svc.cluster.local"}
2025-12-01T14:37:18.304Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "92d676df-f098-44c9-a495-4e4ff847aa72", "primary name": "some-name-pxc-0.some-name-pxc.users-8513.svc.cluster.local"}
2025-12-01T14:37:34.557Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "51afcd49-8448-4c9a-92c6-e5750e0471a3", "user": "monitor"}
2025-12-01T14:37:35.417Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "51afcd49-8448-4c9a-92c6-e5750e0471a3", "user": "monitor"}
2025-12-01T14:37:35.431Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "51afcd49-8448-4c9a-92c6-e5750e0471a3", "last-applied-secret": "24ac5519f1f220e885b02097d9a70808827810df8a11ef293bfdbc0c75b63079"}
2025-12-01T14:37:38.231Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "51afcd49-8448-4c9a-92c6-e5750e0471a3"}
2025-12-01T14:37:40.978Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "00ca9404-887b-48ba-a8fe-ac0e206f9836", "user": "operator"}
2025-12-01T14:37:40.996Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "00ca9404-887b-48ba-a8fe-ac0e206f9836", "user": "operator"}
2025-12-01T14:37:41.018Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "00ca9404-887b-48ba-a8fe-ac0e206f9836", "secret": "some-name-mysql-init", "user": "operator"}
2025-12-01T14:37:41.058Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "00ca9404-887b-48ba-a8fe-ac0e206f9836", "user": "operator"}
2025-12-01T14:37:41.077Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "00ca9404-887b-48ba-a8fe-ac0e206f9836", "user": "operator"}
2025-12-01T14:37:41.100Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "00ca9404-887b-48ba-a8fe-ac0e206f9836", "last-applied-secret": "8adf2850f9fc4c481848b0b1c0f5b28bbaa68a9a1ffc2dc4de69e020065efe9b"}
2025-12-01T14:37:41.104Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "00ca9404-887b-48ba-a8fe-ac0e206f9836", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-12-01T14:37:43.033Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "621e9aca-ee96-4b44-9272-145246b7ddc5", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-12-01T14:38:10.869Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "f28e68a7-2d69-4b42-8381-22083567f181"}
2025-12-01T14:38:25.283Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "a1a1f1ae-120a-4a1d-b7cc-211fb7e81fee"}
2025-12-01T14:38:30.256Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "9ccc64b6-fd82-41ea-a8db-6c2d9f452eac"}
2025-12-01T14:38:35.477Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "69a174d0-0945-4427-b1c0-174abff22241"}
2025-12-01T14:38:40.552Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "b346b313-55cd-465b-98d6-5ff4f3c5babc"}
2025-12-01T14:38:46.372Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "a8d6c9b8-2b5f-410f-b196-352ad24f4d3a"}
2025-12-01T14:38:51.439Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "c2889ae8-6a7a-475c-9306-69b93bcbbd71"}
2025-12-01T14:38:56.782Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "4ea83f99-5187-4b32-b244-d94ff355fa49"}
2025-12-01T14:39:01.768Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "fb37cbde-5f5f-405d-a176-a6349655fe9f"}
2025-12-01T14:39:07.266Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "b112ef43-a5da-4fdf-a60e-c6f16b033a0d"}
2025-12-01T14:39:12.582Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "dfac5ccb-747a-4486-9798-bbcc5fab6565"}
2025-12-01T14:39:17.753Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "39244275-742c-4557-b46c-daf2c947fc8b"}
2025-12-01T14:39:23.388Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "ac03ff38-e156-482a-8317-373f4e549bee"}
2025-12-01T14:39:27.851Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "636d16d8-7f81-4e9b-b1a9-42fcefe38857", "user": "root"}
2025-12-01T14:39:27.884Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "636d16d8-7f81-4e9b-b1a9-42fcefe38857", "user": "root"}
2025-12-01T14:39:27.937Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "636d16d8-7f81-4e9b-b1a9-42fcefe38857", "secret": "some-name-mysql-init", "user": "root"}
2025-12-01T14:39:28.902Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "5795582f-6d2a-4ec0-b5a0-22bef2b8af8a"}
2025-12-01T14:39:30.154Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "636d16d8-7f81-4e9b-b1a9-42fcefe38857"}
2025-12-01T14:39:30.175Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "636d16d8-7f81-4e9b-b1a9-42fcefe38857", "user": "root"}
2025-12-01T14:39:30.198Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "636d16d8-7f81-4e9b-b1a9-42fcefe38857", "user": "root"}
2025-12-01T14:39:30.211Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "636d16d8-7f81-4e9b-b1a9-42fcefe38857", "user": "monitor"}
2025-12-01T14:39:30.226Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "636d16d8-7f81-4e9b-b1a9-42fcefe38857", "user": "monitor"}
2025-12-01T14:39:30.244Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "636d16d8-7f81-4e9b-b1a9-42fcefe38857", "secret": "some-name-mysql-init", "user": "monitor"}
2025-12-01T14:39:30.265Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "636d16d8-7f81-4e9b-b1a9-42fcefe38857", "user": "monitor"}
2025-12-01T14:39:30.285Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "636d16d8-7f81-4e9b-b1a9-42fcefe38857", "user": "monitor"}
2025-12-01T14:39:30.363Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "636d16d8-7f81-4e9b-b1a9-42fcefe38857", "user": "xtrabackup"}
2025-12-01T14:39:30.380Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "636d16d8-7f81-4e9b-b1a9-42fcefe38857", "user": "xtrabackup"}
2025-12-01T14:39:30.396Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "636d16d8-7f81-4e9b-b1a9-42fcefe38857", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2025-12-01T14:39:30.418Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "636d16d8-7f81-4e9b-b1a9-42fcefe38857", "user": "xtrabackup"}
2025-12-01T14:39:30.434Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "636d16d8-7f81-4e9b-b1a9-42fcefe38857", "user": "xtrabackup"}
2025-12-01T14:39:30.441Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "636d16d8-7f81-4e9b-b1a9-42fcefe38857", "user": "proxyadmin"}
2025-12-01T14:39:30.464Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "636d16d8-7f81-4e9b-b1a9-42fcefe38857", "user": "proxyadmin"}
2025-12-01T14:39:30.482Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "636d16d8-7f81-4e9b-b1a9-42fcefe38857", "user": "proxyadmin"}
2025-12-01T14:39:30.482Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "636d16d8-7f81-4e9b-b1a9-42fcefe38857", "last-applied-secret": "0da9507d24ac7c78d0a05d517a92d717684ca4b74fd8827602c7d4f20e1774c5"}
2025-12-01T14:39:30.482Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "636d16d8-7f81-4e9b-b1a9-42fcefe38857", "last-applied-secret": "0da9507d24ac7c78d0a05d517a92d717684ca4b74fd8827602c7d4f20e1774c5"}
2025-12-01T14:39:30.485Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "636d16d8-7f81-4e9b-b1a9-42fcefe38857", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-12-01T14:39:30.552Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "636d16d8-7f81-4e9b-b1a9-42fcefe38857", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-12-01T14:39:32.372Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "636d16d8-7f81-4e9b-b1a9-42fcefe38857", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-12-01T14:39:49.176Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "92303d6f-95d1-4916-acf7-c424d2d87998", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-12-01T14:39:49.219Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "92303d6f-95d1-4916-acf7-c424d2d87998", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"}
2025-12-01T14:39:49.267Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "92303d6f-95d1-4916-acf7-c424d2d87998", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"}
2025-12-01T14:39:49.347Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "92303d6f-95d1-4916-acf7-c424d2d87998", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
2025-12-01T14:39:49.458Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "92303d6f-95d1-4916-acf7-c424d2d87998", "object": "some-name-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
2025-12-01T14:39:50.116Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "60826fad-07d2-43fc-8b25-e65bb5cb9d94", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"}
2025-12-01T14:41:27.066Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "51b40583-d874-4a02-8457-ac5bbec53aa2", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-8513 on 34.118.224.10:53: no such host"}
2025-12-01T14:42:48.778Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "user": "root"}
2025-12-01T14:42:48.809Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "user": "root"}
2025-12-01T14:42:48.828Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "secret": "some-name-mysql-init", "user": "root"}
2025-12-01T14:42:48.844Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "user": "root"}
2025-12-01T14:42:48.866Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "user": "root"}
2025-12-01T14:42:48.871Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "user": "operator"}
2025-12-01T14:42:48.887Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "user": "operator"}
2025-12-01T14:42:48.908Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "secret": "some-name-mysql-init", "user": "operator"}
2025-12-01T14:42:48.932Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "user": "operator"}
2025-12-01T14:42:48.946Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "user": "operator"}
2025-12-01T14:42:48.954Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "user": "monitor"}
2025-12-01T14:42:48.969Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "user": "monitor"}
2025-12-01T14:42:48.992Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "secret": "some-name-mysql-init", "user": "monitor"}
2025-12-01T14:42:49.015Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "user": "monitor"}
2025-12-01T14:42:49.317Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "user": "xtrabackup"}
2025-12-01T14:42:49.331Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "user": "xtrabackup"}
2025-12-01T14:42:49.347Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2025-12-01T14:42:49.364Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "user": "xtrabackup"}
2025-12-01T14:42:49.377Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "user": "xtrabackup"}
2025-12-01T14:42:49.383Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "user": "replication"}
2025-12-01T14:42:49.397Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "user": "replication"}
2025-12-01T14:42:49.414Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "secret": "some-name-mysql-init", "user": "replication"}
2025-12-01T14:42:49.431Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "user": "replication"}
2025-12-01T14:42:49.443Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "user": "replication"}
2025-12-01T14:42:49.443Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "last-applied-secret": "84f721c477fd0badc79d6dcf0aa001df2edd6fbaa41029324b771dc166604661"}
2025-12-01T14:42:49.446Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "29f99ba8-9780-45a9-a43a-339a5eda1ab1", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-12-01T14:43:51.602Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "1cf4926a-a613-4bad-a4f0-07b24b91f2d7", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.239.82.59:33062: connect: connection refused"}
2025-12-01T14:44:33.213Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "16e605b2-3b76-4014-9a24-b94789c984b3", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-8513 on 34.118.224.10:53: no such host"}
2025-12-01T14:45:10.189Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "a4878749-a4e9-4772-b8ed-2f85b3f6f110", "user": "monitor"}
2025-12-01T14:45:11.835Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "a4878749-a4e9-4772-b8ed-2f85b3f6f110", "user": "monitor"}
2025-12-01T14:45:13.120Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "8d6d12a6-fd05-46c6-af49-bd0cf30a6fa1", "user": "monitor"}
2025-12-01T14:45:13.136Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "8d6d12a6-fd05-46c6-af49-bd0cf30a6fa1", "user": "monitor"}
2025-12-01T14:45:13.152Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "8d6d12a6-fd05-46c6-af49-bd0cf30a6fa1", "secret": "some-name-mysql-init", "user": "monitor"}
2025-12-01T14:45:13.170Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "8d6d12a6-fd05-46c6-af49-bd0cf30a6fa1", "user": "monitor"}
2025-12-01T14:45:15.186Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "60a89854-3303-420c-9e68-7b05cc8560db", "user": "monitor"}
2025-12-01T14:45:20.742Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "87201e87-4f7d-4b60-b091-6bc9f1254a65", "user": "monitor"}
2025-12-01T14:45:26.326Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "d73cb94e-4dbc-442f-8366-2ace65fdb33a", "user": "monitor"}
2025-12-01T14:45:32.177Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "0aad5ffb-c294-44ed-ac04-fe4364000a0f", "user": "monitor"}
2025-12-01T14:45:37.822Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "cd1c6f49-8176-47f3-80c8-661319466ad8", "user": "monitor"}
2025-12-01T14:45:43.794Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-8513", "name": "some-name", "reconcileID": "e0f449a8-0574-40ed-8d23-31d4b4ca55a9", "user": "monitor"}
github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1
	/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:856
[mysql] 2025/12/01 14:31:38 packets.go:58 read tcp 10.239.81.63:45240->10.239.80.47:33062: read: connection reset by peer
[mysql] 2025/12/01 14:42:26 packets.go:58 unexpected EOF
-  },
-  {
-  },
-  {
-  },
-  },
+  },
-  "0da9507d24ac7c78d0a05d517a92d717684ca4b74fd8827602c7d4f20e1774c5",
+  "0da9507d24ac7c78d0a05d517a92d717684ca4b74fd8827602c7d4f20e1774c5",
-  "24ac5519f1f220e885b02097d9a70808827810df8a11ef293bfdbc0c75b63079",
+  "24ac5519f1f220e885b02097d9a70808827810df8a11ef293bfdbc0c75b63079",
-  "82c5c699c3c0c035f313eeaf9d92f33737d808cce12fce0851efcf31432d813",
-  "84f721c477fd0badc79d6dcf0aa001df2edd6fbaa41029324b771dc166604661",
+  "84f721c477fd0badc79d6dcf0aa001df2edd6fbaa41029324b771dc166604661",
-  "8adf2850f9fc4c481848b0b1c0f5b28bbaa68a9a1ffc2dc4de69e020065efe9b",
+  "8adf2850f9fc4c481848b0b1c0f5b28bbaa68a9a1ffc2dc4de69e020065efe9b",
+  "976122f713107b83f1f8b1115d3260bac4380b593f7dd3090b0d7752b9b5889",
"976122f713107b83f1f8b1115d3260bac4380b593f7dd3090b0d7752b9b58898", -  Annotations: map[string]string{ +  Annotations: map[string]string{ +  APIVersion: "", -  APIVersion: "apps/v1", -  APIVersion: "apps/v1", -  APIVersion: "v1", -  Args: []string{"logrotate"}, +  AvailableReplicas: 0, -  AvailableReplicas: 2, -  AvailableReplicas: 3, -  "c8bda7e3f2f0e73b03380c6aef4ab247c801f67a87718970af1063d0a3eaa1ab", -  CollisionCount: &0, +  CollisionCount: nil, +  CreationTimestamp: v1.Time{}, -  CreationTimestamp: v1.Time{Time: s"2025-12-01 14:22:56 +0000 UTC"}, +  CurrentReplicas: 0, -  CurrentReplicas: 2, -  CurrentReplicas: 3, +  CurrentRevision: "", -  CurrentRevision: "some-name-proxysql-5d56757fb5", -  CurrentRevision: "some-name-proxysql-6d7cf5496d", -  CurrentRevision: "some-name-proxysql-76c5b985cd", -  CurrentRevision: "some-name-proxysql-76dbd49c57", -  CurrentRevision: "some-name-proxysql-975c99b84", -  CurrentRevision: "some-name-proxysql-bb8544f88", -  CurrentRevision: "some-name-pxc-566ddbb85c", -  CurrentRevision: "some-name-pxc-698d77695b", -  CurrentRevision: "some-name-pxc-6db787d7", -  CurrentRevision: "some-name-pxc-f6dd7fd7b", -  DefaultMode: &420, -  DefaultMode: &420, +  DefaultMode: nil, +  DefaultMode: nil, +  DeprecatedServiceAccount: "", -  DeprecatedServiceAccount: "default", +  DNSPolicy: "", -  DNSPolicy: "ClusterFirst", -  EnvFrom: []v1.EnvFromSource{{SecretRef: s"&SecretEnvSource{LocalObjectRefe"...}}, -  Env: []v1.EnvVar{ -  FieldsType: "FieldsV1", -  FieldsType: "FieldsV1", -  FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`..., -  FieldsV1: s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`..., +  Generation: 0, -  Generation: 1, -  Generation: 2, -  Generation: 3, -  Generation: 4, -  Generation: 5, -  Generation: 6, -  Generation: 7, -  Generation: 8, -  Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", -  Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", -  ImagePullPolicy: "Always", +  "last-applied-secret": "82c5c699c3c0c035f313eeaf9d92f33737d808cce12fce0851efcf31432d8138", +  "last-applied-secret": "c8bda7e3f2f0e73b03380c6aef4ab247c801f67a87718970af1063d0a3eaa1ab", +  ManagedFields: nil, -  ManagedFields: []v1.ManagedFieldsEntry{ -  Manager: "kube-controller-manager", -  Manager: "percona-xtradb-cluster-operator", -  {Name: "IS_LOGCOLLECTOR", Value: "yes"}, -  {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"}, -  Name: "logrotate", -  Name: "logs", -  {Name: "MONITOR_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, -  {Name: "POD_NAMESPASE", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, -  {Name: "POD_NAME", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, -  {Name: "SERVICE_TYPE", Value: "mysql"}, +  ObservedGeneration: 0, -  ObservedGeneration: 1, -  ObservedGeneration: 2, -  ObservedGeneration: 3, -  ObservedGeneration: 4, -  ObservedGeneration: 5, -  ObservedGeneration: 6, -  ObservedGeneration: 7, -  ObservedGeneration: 8, -  Operation: "Update", -  Operation: "Update", -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMGRhOTUwN2QyNGFjN2M3OGQwYTA1ZDUxN2E5MmQ3MTc2ODRjYTRiNzRmZDg4Mjc2MDJjN2Q0ZjIwZTE3NzRjNSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMjRhYzU1MTlmMWYyMjBlODg1YjAyMDk3ZDlhNzA4MDg4Mjc4MTBkZjhhMTFlZjI5M2JmZGJjMGM3NWI2MzA3OSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMjRhYzU1MTlmMWYyMjBlODg1YjAyMDk3ZDlhNzA4MDg4Mjc4MTBkZjhhMTFlZjI5M2JmZGJjMGM3NWI2MzA3OSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiODJjNWM2OTljM2MwYzAzNWYzMTNlZWFmOWQ5MmYzMzczN2Q4MDhjY2UxMmZjZTA4NTFlZmNmMzE0MzJkODEzOCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiODRmNzIxYzQ3N2ZkMGJhZGM3OWQ2ZGNmMGFhMDAxZGYyZWRkNmZiYWE0MTAyOTMyNGI3NzFkYzE2NjYwNDY2MSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiODRmNzIxYzQ3N2ZkMGJhZGM3OWQ2ZGNmMGFhMDAxZGYyZWRkNmZiYWE0MTAyOTMyNGI3NzFkYzE2NjYwNDY2MSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOGFkZjI4NTBmOWZjNGM0ODE4NDhiMGIxYzBmNWIyOGJiYWE2OGE5YTFmZmMyZGM0ZGU2OWUwMjAwNjVlZmU5YiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOGFkZjI4NTBmOWZjNGM0ODE4NDhiMGIxYzBmNWIyOGJiYWE2OGE5YTFmZmMyZGM0ZGU2OWUwMjAwNjVlZmU5YiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOTc2MTIyZjcxMzEwN2I4M2YxZjhiMTExNWQzMjYwYmFjNDM4MGI1OTNmN2RkMzA5MGIwZDc3NTJiOWI1ODg5OCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOTc2MTIyZjcxMzEwN2I4M2YxZjhiMTExNWQzMjYwYmFjNDM4MGI1OTNmN2RkMzA5MGIwZDc3NTJiOWI1ODg5OCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMGRhOTUwN2QyNGFjN2M3OGQwYTA1ZDUxN2E5MmQ3MTc2ODRjYTRiNzRmZDg4Mjc2MDJjN2Q0ZjIwZTE3NzRjNSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMGRhOTUwN2QyNGFjN2M3OGQwYTA1ZDUxN2E5MmQ3MTc2ODRjYTRiNzRmZDg4Mjc2MDJjN2Q0ZjIwZTE3NzRjNSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMGRhOTUwN2QyNGFjN2M3OGQwYTA1ZDUxN2E5MmQ3MTc2ODRjYTRiNzRmZDg4Mjc2MDJjN2Q0ZjIwZTE3NzRjNSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMjY1LTMwZWNlNmQ2IiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJsb2dzIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1sb2djb2xsZWN0b3IiLCJlbnZGcm9tIjpbeyJzZWNyZXRSZWYiOnsibmFtZSI6InNvbWUtbmFtZS1sb2ctY29sbGVjdG9yIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IkxPR19EQVRBX0RJUiIsInZhbHVlIjoiL3Zhci9saWIvbXlzcWwifSx7Im5hbWUiOiJQT0RfTkFNRVNQQVNFIiwidmFsdWVGcm9tIjp7ImZpZWxkUmVmIjp7ImZpZWxkUGF0aCI6Im1ldGFkYXRhLm5hbWVzcGFjZSJ9fX0seyJuYW1lIjoiUE9EX05BTUUiLCJ2YWx1ZUZyb20iOnsiZmllbGRSZWYiOnsiZmllbGRQYXRoIjoibWV0YWRhdGEubmFtZSJ9fX1dLCJyZXNvdXJjZXMiOnt9LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJkYXRhZGlyIiwibW91bnRQYXRoIjoiL3Zhci9saWIvbXlzcWwifV0sImltYWdlUHVsbFBvbGljeSI6IkFsd2F5cyJ9LHsibmFtZSI6ImxvZ3JvdGF0ZSIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOm1haW4tbG9nY29sbGVjdG9yIiwiYXJncyI6WyJsb2dyb3RhdGUiXSwiZW52IjpbeyJuYW1lIjoiU0VSVklDRV9UWVBFIiwidmFsdWUiOiJteXNxbCJ9LHsibmFtZSI6Ik1PTklUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJtb25pdG9yIn19fV0sInJlc291cmNlcyI6e30sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImRhdGFkaXIiLCJtb3VudF
BhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHhjIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1weGM4LjAiLCJjb21tYW5kIjpbIi92YXIvbGliL215c3FsL3B4Yy1lbnRyeXBvaW50LnNoIl0sImFyZ3MiOlsibXlzcWxkIl0sInBvcnRzIjpbeyJuYW1lIjoibXlzcWwiLCJjb250YWluZXJQb3J0IjozMzA2fSx7Im5hbWUiOiJzc3QiLCJjb250YWluZXJQb3J0Ijo0NDQ0fSx7Im5hbWUiOiJ3cml0ZS1zZXQiLCJjb250YWluZXJQb3J0Ijo0NTY3fSx7Im5hbWUiOiJpc3QiLCJjb250YWluZXJQb3J0Ijo0NTY4fSx7Im5hbWUiOiJteXNxbC1hZG1pbiIsImNvbnRhaW5lclBvcnQiOjMzMDYyfSx7Im5h"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMGRhOTUwN2QyNGFjN2M3OGQwYTA1ZDUxN2E5MmQ3MTc2ODRjYTRiNzRmZDg4Mjc2MDJjN2Q0ZjIwZTE3NzRjNSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMjY1LTMwZWNlNmQ2IiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXB4YzguMCIsImNvbW1hbmQiOlsiL3Zhci9saWIvbXlzcWwvcHhjLWVudHJ5cG9pbnQuc2giXSwiYXJncyI6WyJteXNxbGQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InNzdCIsImNvbnRhaW5lclBvcnQiOjQ0NDR9LHsibmFtZSI6IndyaXRlLXNldCIsImNvbnRhaW5lclBvcnQiOjQ1Njd9LHsibmFtZSI6ImlzdCIsImNvbnRhaW5lclBvcnQiOjQ1Njh9LHsibmFtZSI6Im15c3FsLWFkbWluIiwiY29udGFpbmVyUG9ydCI6MzMwNjJ9LHsibmFtZSI6Im15c3FseCIsImN
vbnRhaW5lclBvcnQiOjMzMDYwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXB4YyIsIm9wdGlvbmFsIjp0cnVlfX1dLCJlbnYiOlt7Im5hbWUiOiJQWENfU0VSVklDRSIsInZhbHVlIjoic29tZS1uYW1lLXB4Yy11bnJlYWR5In0seyJuYW1lIjoiTU9OSVRPUl9IT1NUIiwidmFsdWUiOiIlIn0seyJuYW1lIjoiTVlTUUxfUk9PVF9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6InJvb3QifX19LHsibmFtZSI6IlhUUkFCQUNLVVBfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJ4dHJhYmFja3VwIn19fSx7Im5hbWUiOiJNT05JVE9SX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoibW9uaXRvciJ9fX0seyJuYW1lIjoiQ0xVU1RFUl9IQVNIIiwidmFsdWUiOiI4NDI1NTEyIn0seyJuYW1lIjoiT1BFUkFUT1JfQURNSU5fUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiTElWRU5FU1NfQ0hFQ0tfVElNRU9VVCIsInZhbHVlIjoiNSJ9LHsibmFtZSI6IlJFQURJTkVTU19DSEVDS19USU1FT1VUIiwidmFsdWUiOiIxNSJ9LHsibmFtZSI6IkRFRkFVTFRfQVVUSEVOVElDQVRJT05fUExVR0lOIiwidmFsdWUiOiJjYWNoaW5nX3NoYTJfcGFzc3dvcmQifSx7Im5hbWUiOiJNWVNR"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMjRhYzU1MTlmMWYyMjBlODg1YjAyMDk3ZDlhNzA4MDg4Mjc4MTBkZjhhMTFlZjI5M2JmZGJjMGM3NWI2MzA3OSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMjRhYzU1MTlmMWYyMjBlODg1YjAyMDk3ZDlhNzA4MDg4Mjc4MTBkZjhhMTFlZjI5M2JmZGJjMGM3NWI2MzA3OSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiODRmNzIxYzQ3N2ZkMGJhZGM3OWQ2ZGNmMGFhMDAxZGYyZWRkNmZiYWE0MTAyOTMyNGI3NzFkYzE2NjYwNDY2MSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYzhiZGE3ZTNmMmYwZTczYjAzMzgwYzZhZWY0YWIyNDdjODAxZjY3YTg3NzE4OTcwYWYxMDYzZDBhM2VhYTFhYiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYzhiZGE3ZTNmMmYwZTczYjAzMzgwYzZhZWY0YWIyNDdjODAxZjY3YTg3NzE4OTcwYWYxMDYzZDBhM2VhYTFhYiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNv"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiODJjNWM2OTljM2MwYzAzNWYzMTNlZWFmOWQ5MmYzMzczN2Q4MDhjY2UxMmZjZTA4NTFlZmNmMzE0MzJkODEzOCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMw"..., +  PeriodSeconds: 0, -  PeriodSeconds: 10, +  PersistentVolumeClaimRetentionPolicy: nil, -  PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", +  Phase: "", -  Phase: "Pending", +  PodManagementPolicy: "", -  PodManagementPolicy: "OrderedReady", +  Protocol: "", -  Protocol: "TCP", +  ReadyReplicas: 0, -  ReadyReplicas: 2, -  ReadyReplicas: 3, +  Replicas: 0, -  Replicas: 2, -  Replicas: &2, +  Replicas: &2, -  Replicas: 3, -  Replicas: &3, +  Replicas: &3, +  ResourceVersion: "", -  ResourceVersion: "1764599013721999010", -  ResourceVersion: "1764599192730127010", -  ResourceVersion: "1764599327371343010", -  ResourceVersion: "1764599364112591010", -  ResourceVersion: "1764599384133055010", -  ResourceVersion: "1764599534186431010", -  ResourceVersion: "1764599570411983010", -  ResourceVersion: "1764599662650367010", -  ResourceVersion: "1764599737681055010", -  ResourceVersion: "1764599852659951010", -  ResourceVersion: "1764599884897151010", -  ResourceVersion: "1764599986517631010", -  ResourceVersion: "1764600167940959010", +  RestartPolicy: "", -  RestartPolicy: "Always", -  RevisionHistoryLimit: &10, +  RevisionHistoryLimit: nil, +  SchedulerName: "", -  SchedulerName: "default-scheduler", -  Subresource: "status", +  TerminationMessagePath: "", -  TerminationMessagePath: "/dev/termination-log", +  TerminationMessagePolicy: "", -  TerminationMessagePolicy: "File", -  Time: s"2025-12-01 14:22:56 +0000 UTC", -  Time: s"2025-12-01 14:23:33 +0000 UTC", -  Time: s"2025-12-01 14:26:32 +0000 UTC", -  Time: s"2025-12-01 14:28:32 +0000 UTC", -  Time: s"2025-12-01 14:28:47 +0000 UTC", -  Time: s"2025-12-01 14:28:57 +0000 UTC", -  Time: s"2025-12-01 14:29:24 +0000 UTC", -  Time: s"2025-12-01 14:29:31 +0000 UTC", -  Time: s"2025-12-01 14:29:41 +0000 UTC", -  Time: s"2025-12-01 14:29:44 +0000 UTC", -  Time: s"2025-12-01 14:32:14 +0000 UTC", -  Time: s"2025-12-01 14:32:22 +0000 UTC", -  Time: s"2025-12-01 14:32:50 +0000 UTC", -  Time: s"2025-12-01 14:33:59 +0000 UTC", -  Time: s"2025-12-01 14:34:22 +0000 UTC", -  Time: s"2025-12-01 14:35:07 +0000 UTC", -  Time: s"2025-12-01 14:35:37 +0000 UTC", -  Time: s"2025-12-01 14:37:32 +0000 UTC", -  Time: s"2025-12-01 14:37:41 +0000 UTC", -  Time: s"2025-12-01 14:38:04 +0000 UTC", -  Time: s"2025-12-01 14:39:30 +0000 UTC", -  Time: s"2025-12-01 14:39:46 +0000 UTC", -  Time: s"2025-12-01 14:39:49 +0000 UTC", -  Time: s"2025-12-01 14:42:47 +0000 UTC", -  
TopologySpreadConstraints: nil, +  TopologySpreadConstraints: []v1.TopologySpreadConstraint{}, +  UID: "", -  UID: "64416089-948e-4cd1-8446-b17d20e89bf9", -  UID: "e70bf990-6359-47a6-bace-f291b225c7cf", +  UpdatedReplicas: 0, -  UpdatedReplicas: 1, -  UpdatedReplicas: 2, -  UpdatedReplicas: 3, +  UpdateRevision: "", -  UpdateRevision: "some-name-proxysql-5d56757fb5", -  UpdateRevision: "some-name-proxysql-6d7cf5496d", -  UpdateRevision: "some-name-proxysql-76c5b985cd", -  UpdateRevision: "some-name-proxysql-76dbd49c57", -  UpdateRevision: "some-name-proxysql-975c99b84", -  UpdateRevision: "some-name-proxysql-bb8544f88", -  UpdateRevision: "some-name-pxc-566ddbb85c", -  UpdateRevision: "some-name-pxc-695c884c8c", -  UpdateRevision: "some-name-pxc-698d77695b", -  UpdateRevision: "some-name-pxc-6db787d7", -  UpdateRevision: "some-name-pxc-f6dd7fd7b", -  VolumeMode: &"Filesystem", +  VolumeMode: nil, -  VolumeMounts: []v1.VolumeMount{{Name: "datadir", MountPath: "/var/lib/mysql"}},   }    },    },    {    },    },    {    },    }, ""),    },    {    },    },    },    ... // 16 identical fields    ... // 16 identical fields    ... // 22 identical fields    ... // 2 identical fields    ... // 2 identical fields    ... // 2 identical fields    ... // 3 identical elements    ... // 3 identical fields    ... // 3 identical fields    ... // 3 identical fields    ... // 4 identical fields    ... // 5 identical elements    ... // 5 identical fields    ... // 5 identical fields    ... // 5 identical fields    ... // 6 identical fields    ... // 6 identical fields    ... // 7 identical fields    "8",    ... // 8 identical fields    ... // 9 identical fields    ... // 9 identical fields    AccessModes: nil,    ActiveDeadlineSeconds: nil,    Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},    Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},    Annotations: map[string]string{    Args: {"mysqld"},    Args: {"proxysql", "-f", "-c", "/etc/proxysql/proxysql.cnf", ...},    AutomountServiceAccountToken: nil,    AWSElasticBlockStore: nil,    AzureFile: nil,    Capacity: nil,    Conditions: nil,    ConfigMapKeyRef: nil,    ConfigMap: &v1.ConfigMapVolumeSource{    ContainerPort: 3306,    ContainerPort: 33060,    ContainerPort: 33062,    ContainerPort: 4444,    ContainerPort: 4567,    ContainerPort: 4568,    ContainerPort: 6032,    ContainerPort: 6070,    Containers: []v1.Container{    DataSource: nil,    DataSourceRef: nil,    DeletionGracePeriodSeconds: nil,    DeletionGracePeriodSeconds: nil,    DeletionTimestamp: nil,    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-proxysql"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-pxc"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-log-collector"}, Optional: &true}}},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: 
"OPERATOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "proxyadmin"}}}, ...},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, ...},    Env: []v1.EnvVar{    EphemeralContainers: nil,    FailureThreshold: 3,    FC: nil,    FieldPath: "metadata.name",    FieldPath: "metadata.namespace",    FieldRef: &v1.ObjectFieldSelector{    Finalizers: nil,    Finalizers: nil,    GitRepo: nil,    HostAliases: nil,    HostIP: "",    HostPort: 0,    ImagePullPolicy: "Always",    InitContainers: []v1.Container{    InitialDelaySeconds: 300,    ISCSI: nil,    Items: nil,    Items: nil,    "kubectl.kubernetes.io/default-container": "proxysql",    "kubectl.kubernetes.io/default-container": "pxc",    Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},    Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},    Labels: nil,    "last-applied-secret": strings.Join({    Lifecycle: nil,    LivenessProbe: &v1.Probe{    LocalObjectReference: {Name: "auto-some-name-pxc"},    LocalObjectReference: {Name: "some-name-pxc"},    ManagedFields: nil,    MinReadySeconds: 0,    Name: "auto-config",    {Name: "bin", VolumeSource: {EmptyDir: &{}}},    {Name: "CLUSTER_HASH", Value: "8425512"},    Name: "config",    Name: "ist",    {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"},    {Name: "MONITOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "monitor"}}},    Name: "mysql",    Name: "mysql-admin",    Name: "mysql-init-file",    Name: "mysql-users-secret-file",    Name: "mysqlx",    {Name: "OPERATOR_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}},    Name: "POD_NAME",    Name: "POD_NAMESPASE",    Name: "proxyadm",    Namespace: "users-8513",    Name: "ssl",    Name: "ssl-internal",    Name: "sst",    Name: "stats",    {Name: "tmp", VolumeSource: {EmptyDir: &{}}},    Name: "vault-keyring-secret",    Name: "write-set",    {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}},    NFS: nil,    NodeName: "",    NodeSelector: nil,    ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "last-applied-secret": "82c5c699c3c0c035f313eeaf9d92f33737d808cce12fce0851efcf31432d8138", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}},    ObjectMeta: {Labels: 
{"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}},    ObjectMeta: {Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "pxc", "last-applied-secret": "0da9507d24ac7c78d0a05d517a92d717684ca4b74fd8827602c7d4f20e1774c5", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}},    ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: {Name: "proxydata", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: v1.ObjectMeta{    ObjectMeta: v1.ObjectMeta{    Optional: &false,    Optional: &true,    Optional: &true,    Ordinals: nil,    OS: nil,    Overhead: nil,    OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name", UID: "661ab6c8-2203-4bc5-bf66-e3455a282178", ...}},    OwnerReferences: nil,    "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e",    "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e",    "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a",    "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a",    "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc",    "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc",    Ports: nil,    Ports: []v1.ContainerPort{    PreemptionPolicy: nil,    ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}},    Quobyte: nil,    ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...},    Replicas: &2,    Replicas: &3,    ResizePolicy: nil,    ResourceFieldRef: nil,    Resources: {},    Resources: {Limits: {s"cpu": {i: {...}, s: "1", Format: "DecimalSI"}, s"memory": {i: {...}, s: "2G", Format: "DecimalSI"}}, Requests: {s"cpu": {i: {...}, s: "100m", Format: "DecimalSI"}, s"memory": {i: {...}, Format: "DecimalSI"}}},    SecretName: "internal-some-name",    SecretName: "some-name-mysql-init",    SecretName: "some-name-ssl",    SecretName: "some-name-ssl-internal",    SecretName: "some-name-vault",    Secret: &v1.SecretVolumeSource{    SecurityContext: nil,    Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", 
"app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    SelfLink: "",    ServiceAccountName: "default",    ServiceName: "some-name-proxysql-unready",    ServiceName: "some-name-pxc",    SetHostnameAsFQDN: nil,    Spec: v1.PersistentVolumeClaimSpec{    Spec: v1.PodSpec{    Spec: v1.StatefulSetSpec{    StartupProbe: nil,    Status: v1.PersistentVolumeClaimStatus{    Status: v1.StatefulSetStatus{    StorageClassName: nil,    Subdomain: "",    SuccessThreshold: 1,    Template: v1.PodTemplateSpec{    TerminationGracePeriodSeconds: &30,    TerminationGracePeriodSeconds: &600,    TerminationGracePeriodSeconds: nil,    TimeoutSeconds: 5,    Tolerations: nil,    TypeMeta: {},    TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"},    UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}},   &v1.StatefulSet{    Value: "",    ValueFrom: &v1.EnvVarSource{    VolumeAttributesClassName: nil,    VolumeClaimTemplates: []v1.PersistentVolumeClaim{    VolumeDevices: nil,    VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...},    VolumeName: "",    VolumeSource: v1.VolumeSource{    Volumes: []v1.Volume{    VsphereVolume: nil,    WorkingDir: "", + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE + kubectl patch pxc -n users-8513 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.gT3tE5ZBoB ++ mktemp + local LAST_ERR=/tmp/tmp.7TmSbxZWa9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.gT3tE5ZBoB perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-8513 namespace + cat /tmp/tmp.7TmSbxZWa9 + rm /tmp/tmp.gT3tE5ZBoB /tmp/tmp.7TmSbxZWa9 + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.FpC2NfqXbu ++ mktemp + local LAST_ERR=/tmp/tmp.e291JgglTA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.FpC2NfqXbu No resources found + cat /tmp/tmp.e291JgglTA + rm /tmp/tmp.FpC2NfqXbu /tmp/tmp.e291JgglTA + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.4017eKhIxq ++ mktemp + local LAST_ERR=/tmp/tmp.sKCzyljgGa + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.4017eKhIxq No resources found + cat /tmp/tmp.sKCzyljgGa + rm /tmp/tmp.4017eKhIxq /tmp/tmp.sKCzyljgGa + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.P7tBKlGAUL ++ mktemp + local LAST_ERR=/tmp/tmp.3yiAXZrUH7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.P7tBKlGAUL 
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.3yiAXZrUH7 + rm /tmp/tmp.P7tBKlGAUL /tmp/tmp.3yiAXZrUH7 + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-8513 + rm -rf /tmp/tmp.nLauKsh5Ah + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.90cu2tIc0d + desc 'test passed' + local LAST_OUT=/tmp/tmp.l8yxGEjrCm + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.6Omql5W512 + local exit_status=0 + local LAST_ERR=/tmp/tmp.UlGcMaMxD7 + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-8513
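
The long +/- block earlier in this section is Go cmp.Diff output from the test's compare step: "-" entries come from one side of the comparison, "+" entries from the other, and "... // N identical fields" marks elided matches between the expected StatefulSet manifest and the one found on the cluster. Most of the churn sits in the percona.com/last-config-hash annotation, whose value is base64-encoded JSON (every blob starts with eyJyZXBsaWNhcyI6, i.e. {"replicas":) describing the StatefulSet template the operator last applied; the last-applied-secret values embedded in successive generations differ, which is consistent with the users test rotating system-user passwords. The values in the diff are truncated ("..."), but on a live cluster the full annotation can be decoded; a minimal sketch, using the object and namespace names from this run (jq is only for pretty-printing):

    # Decode the last-config-hash annotation of the proxysql
    # StatefulSet from this run into readable JSON.
    kubectl -n users-8513 get sts some-name-proxysql \
      -o jsonpath='{.metadata.annotations.percona\.com/last-config-hash}' \
      | base64 -d | jq .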
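
The finalizer-stripping pipeline that runs during cleanup is easy to misread because of its quoting: kubectl get pxc --all-namespaces -o wide prints one row per custom resource, grep -v NAMESPACE drops the header, and xargs -L 1 hands each row's tokens to sh -xc, where the first token becomes $0 (the namespace) and the second $1 (the resource name); any extra "-o wide" columns are ignored. Clearing metadata.finalizers this way is what lets the subsequent delete --all calls finish instead of hanging on the operator. The same pipeline, restated on its own:

    # One patch per PXC resource; $0/$1 are filled by xargs from the
    # NAMESPACE and NAME columns of each row.
    kubectl get pxc --all-namespaces -o wide \
        | grep -v NAMESPACE \
        | xargs -L 1 sh -xc \
            'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'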
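
Every kubectl_bin call in this log expands to the same retry scaffold: two mktemp files capture stdout and stderr, the command is attempted up to three times (seq 0 2) with set +e in effect, the loop breaks on the first zero exit status, and the captured output is printed and removed before returning. A sketch of that pattern, reconstructed from the trace rather than copied from the suite's helper library, so details such as any back-off between attempts may differ:

    # Reconstruction of the kubectl_bin wrapper as its expansion
    # appears in the trace; not the canonical implementation.
    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0 i
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            [ "$exit_status" != 0 ] || break   # stop on first success
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }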
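
The destroy sequence that closes the run follows a fixed order: the percona-xtradbcluster-webhook ValidatingWebhookConfiguration is removed first so no admission hook interferes with the rest, the cert-manager v1.18.2 manifests are deleted with failures tolerated (the trace swallows that command's status with ":"), the test namespace users-8513 is force-deleted with --grace-period=0, the run's scratch directory is removed, and the operator namespace pxc-operator is force-deleted the same way. In outline:

    # Teardown order as it appears in the trace; "|| true" stands in
    # for the ':' the harness uses to ignore a failed delete.
    kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
    kubectl delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml || true
    kubectl delete --grace-period=0 --force=true namespace users-8513
    rm -rf /tmp/tmp.nLauKsh5Ah
    kubectl delete --grace-period=0 --force=true namespace pxc-operator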