Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/logs/users-5-7.log
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
+ create_infra users-20579
+ local ns=users-20579
+ '[' -n pxc-operator ']'
+ kubectl get pxc --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch pxc -n users-11094 some-name --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.LDsSiWi8Oj
++ mktemp
+ local LAST_ERR=/tmp/tmp.yn5eTraKU9
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.LDsSiWi8Oj
perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-11094 namespace
+ cat /tmp/tmp.yn5eTraKU9
+ rm /tmp/tmp.LDsSiWi8Oj /tmp/tmp.yn5eTraKU9
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.qBlbfqH0M9
++ mktemp
+ local LAST_ERR=/tmp/tmp.FF6U49P9aY
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.qBlbfqH0M9
No resources found
+ cat /tmp/tmp.FF6U49P9aY
+ rm /tmp/tmp.qBlbfqH0M9 /tmp/tmp.FF6U49P9aY
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.CKgS2mffCZ
++ mktemp
+ local LAST_ERR=/tmp/tmp.6xA9rIcFyP
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.CKgS2mffCZ
No resources found
+ cat /tmp/tmp.6xA9rIcFyP
+ rm /tmp/tmp.CKgS2mffCZ /tmp/tmp.6xA9rIcFyP
+ return 0
+ create_namespace pxc-operator
+ local namespace=pxc-operator
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ sed s/NAMESPACE//
++ tail -n1
++ awk '-F ' '{print $2}'
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ awk '{print $1}'
++ grep chaos-mesh
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep validate-auth
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ awk '{print $1}'
++ grep chaos-mesh
++ kubectl get crd
++ awk '{print $1}'
++ grep chaos-mesh.org
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrole
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ kubectl_bin get ns
+ awk '{print$1}'
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces pxc-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace pxc-operator
+ xargs kubectl delete ns
++ mktemp
++ mktemp
+ egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
+ local LAST_OUT=/tmp/tmp.90e5bzS32D
+ local LAST_OUT=/tmp/tmp.Ep2wNi1uhI
++ mktemp
+ local LAST_ERR=/tmp/tmp.1TYiwfCHgQ
+ local exit_status=0
++ mktemp
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.zVNmjm1C4i
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace pxc-operator
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.Ep2wNi1uhI
+ cat /tmp/tmp.zVNmjm1C4i
+ rm /tmp/tmp.Ep2wNi1uhI /tmp/tmp.zVNmjm1C4i
+ return 0
namespace "users-11094" deleted
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.90e5bzS32D
namespace "pxc-operator" deleted
+ cat /tmp/tmp.1TYiwfCHgQ
+ rm /tmp/tmp.90e5bzS32D /tmp/tmp.1TYiwfCHgQ
+ return 0
+ wait_for_delete namespace/pxc-operator
+ local res=namespace/pxc-operator
+ echo -n 'waiting for namespace/pxc-operator to be deleted'
waiting for namespace/pxc-operator to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "pxc-operator" not found
+ desc 'create namespace pxc-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.F0Wresk4QZ
++ mktemp
+ local LAST_ERR=/tmp/tmp.r4sdFiwUhY
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.F0Wresk4QZ
namespace/pxc-operator created
+ cat /tmp/tmp.r4sdFiwUhY
+ rm /tmp/tmp.F0Wresk4QZ /tmp/tmp.r4sdFiwUhY
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.JuBYI8Tr0e
+++ mktemp
++ local LAST_ERR=/tmp/tmp.H7HsxSXd8K
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.JuBYI8Tr0e
++ cat /tmp/tmp.H7HsxSXd8K
++ rm /tmp/tmp.JuBYI8Tr0e /tmp/tmp.H7HsxSXd8K
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2199-baa7db2e-5-cluster4 --namespace=pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.x9mA5tHs6a
++ mktemp
+ local LAST_ERR=/tmp/tmp.EzaCDyJOzh
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2199-baa7db2e-5-cluster4 --namespace=pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.x9mA5tHs6a
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2199-baa7db2e-5-cluster4" modified.
+ cat /tmp/tmp.EzaCDyJOzh
+ rm /tmp/tmp.x9mA5tHs6a /tmp/tmp.EzaCDyJOzh
+ return 0
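A note on the scaffolding that repeats throughout this trace: every kubectl_bin call expands into the same mktemp/LAST_OUT/LAST_ERR/"seq 0 2" pattern. That is a retry wrapper from the test helper library. The following is a minimal sketch reconstructed purely from the xtrace output above; the real helper (presumably in e2e-tests/functions) may differ in details such as the back-off and the extra '[' 1 == 1 ']' test visible on failed attempts:

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    # up to three attempts, as suggested by "seq 0 2" in the trace
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        [ $exit_status != 0 ] || break   # success: stop retrying
        sleep 0                          # the trace shows "sleep 0" between failed attempts
    done
    cat "$LAST_OUT"        # replay captured stdout
    cat "$LAST_ERR" >&2    # replay captured stderr
    rm "$LAST_OUT" "$LAST_ERR"
    return $exit_status
}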
+ deploy_operator
+ desc 'start PXC operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
start PXC operator
-----------------------------------------------------------------------------------
+ kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/deploy/crd.yaml
++ mktemp
+ local LAST_OUT=/tmp/tmp.HVASDpIvZS
++ mktemp
+ local LAST_ERR=/tmp/tmp.AFXnSqDN5k
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/deploy/crd.yaml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.HVASDpIvZS
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied
+ cat /tmp/tmp.AFXnSqDN5k
+ rm /tmp/tmp.HVASDpIvZS /tmp/tmp.AFXnSqDN5k
+ return 0
+ '[' -n pxc-operator ']'
+ apply_rbac cw-rbac
+ local operator_namespace=pxc-operator
+ local rbac=cw-rbac
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/deploy/cw-rbac.yaml
+ kubectl_bin apply -f -
++ mktemp
+ local LAST_OUT=/tmp/tmp.29uqJkOHs5
++ mktemp
+ sed -e 's^namespace: .*^namespace: pxc-operator^'
+ local LAST_ERR=/tmp/tmp.LELJygDfRH
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.29uqJkOHs5
clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged
serviceaccount/percona-xtradb-cluster-operator created
clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged
+ cat /tmp/tmp.LELJygDfRH
+ rm /tmp/tmp.29uqJkOHs5 /tmp/tmp.LELJygDfRH
+ return 0
+ sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2199-baa7db2e^'
+ sed -e 's^failureThreshold: .*^failureThreshold: 10^'
+ yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' -
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/deploy/cw-operator.yaml
+ kubectl_bin apply -f -
+ yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' -
++ mktemp
+ local LAST_OUT=/tmp/tmp.X6rJG1cx1v
++ mktemp
+ local LAST_ERR=/tmp/tmp.wXkyrIFzaL
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.X6rJG1cx1v
deployment.apps/percona-xtradb-cluster-operator created
service/percona-xtradb-cluster-operator created
+ cat /tmp/tmp.wXkyrIFzaL
+ rm /tmp/tmp.X6rJG1cx1v /tmp/tmp.wXkyrIFzaL
+ return 0
+ sleep 10
+ kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s
++ mktemp
+ local LAST_OUT=/tmp/tmp.Vafala4daC
++ mktemp
+ local LAST_ERR=/tmp/tmp.sS9yuDTcyJ
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.Vafala4daC
pod/percona-xtradb-cluster-operator-56f95ddfc4-lf25l condition met
+ cat /tmp/tmp.sS9yuDTcyJ
+ rm /tmp/tmp.Vafala4daC /tmp/tmp.sS9yuDTcyJ
+ return 0
++ get_operator_pod
++ local label_prefix=app.kubernetes.io/
+++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator
+++ grep -c percona-xtradb-cluster-operator
++ local check_label=1
++ [[ 1 -eq 0 ]]
++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
+++ mktemp
++ local LAST_OUT=/tmp/tmp.PFN3JwwmLc
+++ mktemp
++ local LAST_ERR=/tmp/tmp.Ez2dWemREU
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.PFN3JwwmLc
++ cat /tmp/tmp.Ez2dWemREU
++ rm /tmp/tmp.PFN3JwwmLc /tmp/tmp.Ez2dWemREU
++ return 0
+ wait_pod percona-xtradb-cluster-operator-56f95ddfc4-lf25l 480 pxc-operator
+ local pod=percona-xtradb-cluster-operator-56f95ddfc4-lf25l
+ local max_retry=480
+ local ns=pxc-operator
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
++ echo percona-xtradb-cluster-operator-56f95ddfc4-lf25l
+ local container=
+ set +o xtrace
pod/percona-xtradb-cluster-operator-56f95ddfc4-lf25l condition met
waiting for pod/percona-xtradb-cluster-operator-56f95ddfc4-lf25l to become Ready.Ok
+ sleep 3
+ create_namespace users-20579
+ local namespace=users-20579
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ tail -n1
++ awk '-F ' '{print $2}'
++ sed s/NAMESPACE//
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get MutatingWebhookConfiguration
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ awk '{print $1}'
++ grep chaos-mesh
++ kubectl get ValidatingWebhookConfiguration
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ awk '{print $1}'
++ grep validate-auth
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ awk '{print $1}'
++ grep chaos-mesh
++ grep chaos-mesh.org
++ kubectl get crd
++ awk '{print $1}'
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get clusterrole
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
+ awk '{print$1}'
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces users-20579'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces users-20579
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace users-20579
++ mktemp
+ kubectl_bin get ns
+ xargs kubectl delete ns
++ mktemp
+ local LAST_OUT=/tmp/tmp.REuSJnKWsC
++ mktemp
+ local LAST_OUT=/tmp/tmp.pxglWIdDY3
++ mktemp
+ local LAST_ERR=/tmp/tmp.BcUC04QEnF
+ local exit_status=0
+ local LAST_ERR=/tmp/tmp.sRdLrJsMJ7
+ local exit_status=0
++ seq 0 2
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace users-20579
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace users-20579
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.REuSJnKWsC
+ cat /tmp/tmp.BcUC04QEnF
+ rm /tmp/tmp.REuSJnKWsC /tmp/tmp.BcUC04QEnF
+ return 0
error: resource(s) were provided, but no name was specified
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace users-20579
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ cat /tmp/tmp.pxglWIdDY3
+ cat /tmp/tmp.sRdLrJsMJ7
Error from server (NotFound): namespaces "users-20579" not found
+ rm /tmp/tmp.pxglWIdDY3 /tmp/tmp.sRdLrJsMJ7
+ return 1
+ :
+ wait_for_delete namespace/users-20579
+ local res=namespace/users-20579
+ echo -n 'waiting for namespace/users-20579 to be deleted'
waiting for namespace/users-20579 to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "users-20579" not found
+ desc 'create namespace users-20579'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace users-20579
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace users-20579
++ mktemp
+ local LAST_OUT=/tmp/tmp.3bHbamKgwY
++ mktemp
+ local LAST_ERR=/tmp/tmp.mt13mXRmAa
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace users-20579
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.3bHbamKgwY
namespace/users-20579 created
+ cat /tmp/tmp.mt13mXRmAa
+ rm /tmp/tmp.3bHbamKgwY /tmp/tmp.mt13mXRmAa
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.uOdzk3ksrD
+++ mktemp
++ local LAST_ERR=/tmp/tmp.azUxvRyUJ9
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.uOdzk3ksrD
++ cat /tmp/tmp.azUxvRyUJ9
++ rm /tmp/tmp.uOdzk3ksrD /tmp/tmp.azUxvRyUJ9
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2199-baa7db2e-5-cluster4 --namespace=users-20579
++ mktemp
+ local LAST_OUT=/tmp/tmp.FJ91DQ22vm
++ mktemp
+ local LAST_ERR=/tmp/tmp.piZrBJ732J
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2199-baa7db2e-5-cluster4 --namespace=users-20579
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.FJ91DQ22vm
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2199-baa7db2e-5-cluster4" modified.
+ cat /tmp/tmp.piZrBJ732J
+ rm /tmp/tmp.FJ91DQ22vm /tmp/tmp.piZrBJ732J
+ return 0
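The repeated "error: resource(s) were provided, but no name was specified" messages above are expected noise rather than failures: destroy_chaos_mesh feeds the output of a grep into kubectl delete, and when nothing matches, kubectl receives a resource type with no names. The "+ :" after each error shows the non-zero exit being discarded. Condensed, the pattern is roughly the following (a reconstruction, not the literal helper source):

# delete chaos-mesh leftovers if any exist; tolerate "nothing matched"
timeout 30 kubectl delete MutatingWebhookConfiguration \
    $(kubectl get MutatingWebhookConfiguration | grep chaos-mesh | awk '{print $1}') || :

The same list-filter-delete-or-ignore shape is used for ValidatingWebhookConfigurations, CRDs (grep chaos-mesh.org), clusterroles and clusterrolebindings, and again for the namespace sweep (kubectl get ns | egrep -v '^kube-|^default|...' | awk '{print$1}' | xargs kubectl delete ns).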
+ apply_secrets
+ desc 'create secrets for cloud storages'
+ set +o xtrace
-----------------------------------------------------------------------------------
create secrets for cloud storages
-----------------------------------------------------------------------------------
+ '[' -z '' ']'
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/conf/cloud-secret.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.YMJQf5YWIh
++ mktemp
+ local LAST_ERR=/tmp/tmp.OoBbaqMmo9
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/conf/cloud-secret.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.YMJQf5YWIh
secret/minio-secret created
secret/aws-s3-secret created
secret/gcp-cs-secret created
secret/azure-secret created
+ cat /tmp/tmp.OoBbaqMmo9
+ rm /tmp/tmp.YMJQf5YWIh /tmp/tmp.OoBbaqMmo9
+ return 0
+ desc 'create PXC cluster with 1-password secret'
+ set +o xtrace
-----------------------------------------------------------------------------------
create PXC cluster with 1-password secret
-----------------------------------------------------------------------------------
+ newpass=test-password
++ echo -n test-password
++ base64
+ newpassencrypted=dGVzdC1wYXNzd29yZA==
+ cluster=some-name
+ spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/conf/some-name.yml '' '' /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/conf/secrets_one_pass.yml
+ local cluster=some-name
+ local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/conf/some-name.yml
+ local size=3
+ local sleep=10
+ local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/conf/secrets_one_pass.yml
+ local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/conf/client.yml
+ local port=3306
+ desc 'create first PXC cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
create first PXC cluster
-----------------------------------------------------------------------------------
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/conf/secrets_one_pass.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.U0fvGRZgzo
++ mktemp
+ local LAST_ERR=/tmp/tmp.ahn3UD4Qc0
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/conf/secrets_one_pass.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.U0fvGRZgzo
secret/my-cluster-secrets created
secret/some-name-ssl created
secret/some-name-ssl-internal created
+ cat /tmp/tmp.ahn3UD4Qc0
+ rm /tmp/tmp.U0fvGRZgzo /tmp/tmp.ahn3UD4Qc0
+ return 0
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/conf/client.yml
+ '[' -z '' ']'
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/conf/client.yml
+ kubectl_bin apply -f -
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/conf/client.yml
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
++ mktemp
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#'
+ local LAST_OUT=/tmp/tmp.w4gLR3PBzU
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#'
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2199-baa7db2e#'
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#'
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
++ mktemp
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-20579~
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#'
+ local LAST_ERR=/tmp/tmp.f80eJ2xEVT
+ local exit_status=0
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.w4gLR3PBzU
deployment.apps/pxc-client created
+ cat /tmp/tmp.f80eJ2xEVT
+ rm /tmp/tmp.w4gLR3PBzU /tmp/tmp.f80eJ2xEVT
+ return 0
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
+ [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]]
+ [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]]
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/conf/some-name.yml
+ '[' -z '' ']'
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/conf/some-name.yml
+ kubectl_bin apply -f -
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/conf/some-name.yml
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#'
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#'
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2199-baa7db2e#'
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#'
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-20579~
++ mktemp
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#'
+ local LAST_OUT=/tmp/tmp.WQInBNIyv8
++ mktemp
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
+ local LAST_ERR=/tmp/tmp.8fw8vDt8Cc
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.WQInBNIyv8
perconaxtradbcluster.pxc.percona.com/some-name created
+ cat /tmp/tmp.8fw8vDt8Cc
+ rm /tmp/tmp.WQInBNIyv8 /tmp/tmp.8fw8vDt8Cc
+ return 0
+ desc 'check if all 3 Pods started'
+ set +o xtrace
-----------------------------------------------------------------------------------
check if all 3 Pods started
-----------------------------------------------------------------------------------
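The fan of /usr/bin/sed processes above is how cat_config (visible in the trace) rewrites a stock manifest for the build under test before piping it to kubectl apply -f -: every image: line is redirected to the PR operator image or the matching 5.7 component image, apply: is forced to Never, and the minio-service.#namespace placeholder gets the test namespace. Written as a single pipeline it is roughly the following (condensed from the trace; the stage order mirrors the trace only loosely, and the analogous haproxy, backup, logcollector, and pmm stages are omitted for brevity):

cat "$config" \
    | sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
    | sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' \
    | sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2199-baa7db2e#' \
    | sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' \
    | sed -e 's#apply:.*#apply: Never#' \
    | sed -e 's~minio-service.#namespace~minio-service.users-20579~' \
    | kubectl apply -f -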
++ get_proxy some-name
++ local target_cluster=some-name
+++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}'
++++ mktemp
+++ local LAST_OUT=/tmp/tmp.abiSKDxlng
++++ mktemp
+++ local LAST_ERR=/tmp/tmp.BK5uj0sjuR
+++ local exit_status=0
++++ seq 0 2
+++ for i in '$(seq 0 2)'
+++ set +e
+++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}'
+++ exit_status=0
+++ set -e
+++ '[' 0 '!=' 0 ']'
+++ break
+++ cat /tmp/tmp.abiSKDxlng
+++ cat /tmp/tmp.BK5uj0sjuR
+++ rm /tmp/tmp.abiSKDxlng /tmp/tmp.BK5uj0sjuR
+++ return 0
++ [[ '' == \t\r\u\e ]]
+++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}'
++++ mktemp
+++ local LAST_OUT=/tmp/tmp.KUtP7vndY6
++++ mktemp
+++ local LAST_ERR=/tmp/tmp.P9Ham3br9u
+++ local exit_status=0
++++ seq 0 2
+++ for i in '$(seq 0 2)'
+++ set +e
+++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}'
+++ exit_status=0
+++ set -e
+++ '[' 0 '!=' 0 ']'
+++ break
+++ cat /tmp/tmp.KUtP7vndY6
+++ cat /tmp/tmp.P9Ham3br9u
+++ rm /tmp/tmp.KUtP7vndY6 /tmp/tmp.P9Ham3br9u
+++ return 0
++ [[ true == \t\r\u\e ]]
++ echo some-name-proxysql
++ return
+ local proxy=some-name-proxysql
+ kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-20579
++ mktemp
+ local LAST_OUT=/tmp/tmp.i0Kt90Sw0r
++ mktemp
+ local LAST_ERR=/tmp/tmp.MpQuptsgwQ
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-20579
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-20579
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-20579
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ cat /tmp/tmp.i0Kt90Sw0r
+ cat /tmp/tmp.MpQuptsgwQ
error: no matching resources found
+ rm /tmp/tmp.i0Kt90Sw0r /tmp/tmp.MpQuptsgwQ
+ return 1
+ true
+ wait_for_running some-name-proxysql 1
+ local name=some-name-proxysql
+ let last_pod=0
+ :
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 0
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-proxysql-0 480
+ local pod=some-name-proxysql-0
+ local max_retry=480
+ local ns=
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
++ echo some-name-proxysql-0
+ local container=proxysql
+ set +o xtrace
pod/some-name-proxysql-0 condition met
waiting for pod/some-name-proxysql-0 to become Ready.Ok
+ wait_for_running some-name-pxc 3
+ local name=some-name-pxc
+ let last_pod=2
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 2
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-pxc-0 480
+ local pod=some-name-pxc-0
+ local max_retry=480
+ local ns=
++ echo some-name-pxc-0
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
+ local container=pxc
+ set +o xtrace
pod/some-name-pxc-0 condition met
waiting for pod/some-name-pxc-0 to become Ready.Ok
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-pxc-1 480
+ local pod=some-name-pxc-1
+ local max_retry=480
+ local ns=
++ echo some-name-pxc-1
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
+ local container=pxc
+ set +o xtrace
pod/some-name-pxc-1 condition met
waiting for pod/some-name-pxc-1 to become Ready.Ok
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-pxc-2 480
+ local pod=some-name-pxc-2
+ local max_retry=480
+ local ns=
++ echo some-name-pxc-2
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
+ local container=pxc
+ set +o xtrace
pod/some-name-pxc-2 condition met
waiting for pod/some-name-pxc-2 to become Ready.Ok
+ sleep 10
++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}'
+ local secret_name=my-cluster-secrets
++ getSecretData my-cluster-secrets root
++ local secretName=my-cluster-secrets
++ local dataKey=root
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.YplC7xmJcd
+++ mktemp
++ local LAST_ERR=/tmp/tmp.DMgARhe8O7
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.YplC7xmJcd
++ cat /tmp/tmp.DMgARhe8O7
++ rm /tmp/tmp.YplC7xmJcd /tmp/tmp.DMgARhe8O7
++ return 0
+ local 'root_pass=[7rwyDU[~.?Hed@w'
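getSecretData, used above to recover the root password, is a thin wrapper over kubectl's go-template output; Secret .data values are base64-encoded, hence the decode. A sketch consistent with the trace (assumed, not copied from the helper source):

getSecretData() {
    local secretName=$1
    local dataKey=$2
    # pull one key out of the Secret and decode it to plaintext
    kubectl get "secrets/$secretName" "--template={{.data.$dataKey}}" | base64 --decode
}
# usage, as in the trace above:
# root_pass=$(getSecretData my-cluster-secrets root)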
+ desc 'write data'
+ set +o xtrace
-----------------------------------------------------------------------------------
write data
-----------------------------------------------------------------------------------
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
++ is_keyring_plugin_in_use some-name
++ local cluster=some-name
++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf'
++ egrep -o 'early-plugin-load=keyring_\w+.so'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.WkpKJePiIA
+++ mktemp
++ local LAST_ERR=/tmp/tmp.tDRipwgJda
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.WkpKJePiIA
++ cat /tmp/tmp.tDRipwgJda
Unable to use a TTY - input is not a terminal or the right kind of file
++ rm /tmp/tmp.WkpKJePiIA /tmp/tmp.tDRipwgJda
++ return 0
+ [[ -n '' ]]
+ run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''[7rwyDU[~.?Hed@w'\'' -P3306'
+ local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;'
+ local 'uri=-h some-name-proxysql -uroot -p'\''[7rwyDU[~.?Hed@w'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.x9SQitUusO
+++ mktemp
++ local LAST_ERR=/tmp/tmp.MPqpQo7DuD
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.x9SQitUusO
++ cat /tmp/tmp.MPqpQo7DuD
++ rm /tmp/tmp.x9SQitUusO /tmp/tmp.MPqpQo7DuD
++ return 0
+ client_pod=pxc-client-857d976497-jk7xs
+ wait_pod pxc-client-857d976497-jk7xs
+ local pod=pxc-client-857d976497-jk7xs
+ local max_retry=480
+ local ns=
++ echo pxc-client-857d976497-jk7xs
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-857d976497-jk7xs condition met
waiting for pod/pxc-client-857d976497-jk7xs to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''[7rwyDU[~.?Hed@w'\'' -P3306'
+ local 'command=INSERT myApp.myApp (id) VALUES (100500)'
+ local 'uri=-h some-name-proxysql -uroot -p'\''[7rwyDU[~.?Hed@w'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.mD0SuiQQcZ
+++ mktemp
++ local LAST_ERR=/tmp/tmp.y1Qs7aK2xR
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.mD0SuiQQcZ
++ cat /tmp/tmp.y1Qs7aK2xR
++ rm /tmp/tmp.mD0SuiQQcZ /tmp/tmp.y1Qs7aK2xR
++ return 0
+ client_pod=pxc-client-857d976497-jk7xs
+ wait_pod pxc-client-857d976497-jk7xs
+ local pod=pxc-client-857d976497-jk7xs
+ local max_retry=480
+ local ns=
++ echo pxc-client-857d976497-jk7xs
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-857d976497-jk7xs condition met
waiting for pod/pxc-client-857d976497-jk7xs to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ sleep 30
++ seq 0 2
+ for i in '$(seq 0 $((size - 1)))'
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''[7rwyDU[~.?Hed@w'\'' -P3306'
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''[7rwyDU[~.?Hed@w'\'' -P3306'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-1-57.sql ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''[7rwyDU[~.?Hed@w'\'' -P3306'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''[7rwyDU[~.?Hed@w'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.G04eFx7xIB
+++ mktemp
++ local LAST_ERR=/tmp/tmp.m9u4uLotA1
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.G04eFx7xIB
++ cat /tmp/tmp.m9u4uLotA1
++ rm /tmp/tmp.G04eFx7xIB /tmp/tmp.m9u4uLotA1
++ return 0
+ client_pod=pxc-client-857d976497-jk7xs
+ wait_pod pxc-client-857d976497-jk7xs
+ local pod=pxc-client-857d976497-jk7xs
+ local max_retry=480
+ local ns=
++ echo pxc-client-857d976497-jk7xs
++ egrep '^(pxc|proxysql)$'
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=
+ set +o xtrace
pod/pxc-client-857d976497-jk7xs condition met
waiting for pod/pxc-client-857d976497-jk7xs to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.vQXhoSoFAX/select-1.sql ']'
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-1.sql /tmp/tmp.vQXhoSoFAX/select-1.sql
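compare_mysql_cmd, which drives the select-1 checks here and the select-4 checks later, runs a query through the client pod and diffs the result against a golden file, preferring a version-suffixed variant (select-1-80.sql, select-1-57.sql, ...) when one exists for the server version under test. Sketched from the trace; the variable names ($IMAGE_PXC, $tmp_dir, here /tmp/tmp.vQXhoSoFAX) and the empty-output handling are assumptions:

compare_mysql_cmd() {
    local command_id=$1 command=$2 uri=$3
    local compare_dir=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare
    local expected_result=$compare_dir/$command_id.sql
    # prefer a 5.7-specific golden file when testing a 5.7 image and one exists
    if [[ $IMAGE_PXC =~ 5\.7 ]] && [[ -f $compare_dir/$command_id-57.sql ]]; then
        expected_result=$compare_dir/$command_id-57.sql
    fi
    run_mysql "$command" "$uri" >"$tmp_dir/$command_id.sql"
    if [ ! -s "$tmp_dir/$command_id.sql" ]; then
        return 1    # guessed handling; the trace only shows the '!' -s test
    fi
    diff -u "$expected_result" "$tmp_dir/$command_id.sql"
}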
+ for i in '$(seq 0 $((size - 1)))'
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''[7rwyDU[~.?Hed@w'\'' -P3306'
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''[7rwyDU[~.?Hed@w'\'' -P3306'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-1-57.sql ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''[7rwyDU[~.?Hed@w'\'' -P3306'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''[7rwyDU[~.?Hed@w'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.XxBT0qP5Le
+++ mktemp
++ local LAST_ERR=/tmp/tmp.c7jm0DoKn1
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.XxBT0qP5Le
++ cat /tmp/tmp.c7jm0DoKn1
++ rm /tmp/tmp.XxBT0qP5Le /tmp/tmp.c7jm0DoKn1
++ return 0
+ client_pod=pxc-client-857d976497-jk7xs
+ wait_pod pxc-client-857d976497-jk7xs
+ local pod=pxc-client-857d976497-jk7xs
+ local max_retry=480
+ local ns=
++ echo pxc-client-857d976497-jk7xs
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-857d976497-jk7xs condition met
waiting for pod/pxc-client-857d976497-jk7xs to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.vQXhoSoFAX/select-1.sql ']'
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-1.sql /tmp/tmp.vQXhoSoFAX/select-1.sql
+ for i in '$(seq 0 $((size - 1)))'
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''[7rwyDU[~.?Hed@w'\'' -P3306'
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''[7rwyDU[~.?Hed@w'\'' -P3306'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-1-57.sql ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''[7rwyDU[~.?Hed@w'\'' -P3306'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''[7rwyDU[~.?Hed@w'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.D8XR8rwIcX
+++ mktemp
++ local LAST_ERR=/tmp/tmp.V7iTEefJIX
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.D8XR8rwIcX
++ cat /tmp/tmp.V7iTEefJIX
++ rm /tmp/tmp.D8XR8rwIcX /tmp/tmp.V7iTEefJIX
++ return 0
+ client_pod=pxc-client-857d976497-jk7xs
+ wait_pod pxc-client-857d976497-jk7xs
+ local pod=pxc-client-857d976497-jk7xs
+ local max_retry=480
+ local ns=
++ echo pxc-client-857d976497-jk7xs
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-857d976497-jk7xs condition met
waiting for pod/pxc-client-857d976497-jk7xs to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.vQXhoSoFAX/select-1.sql ']'
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-1.sql /tmp/tmp.vQXhoSoFAX/select-1.sql
++ is_keyring_plugin_in_use some-name
++ local cluster=some-name
++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf'
++ egrep -o 'early-plugin-load=keyring_\w+.so'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.6K1gvXQPSl
+++ mktemp
++ local LAST_ERR=/tmp/tmp.HMw2Kv2yhT
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.6K1gvXQPSl
++ cat /tmp/tmp.HMw2Kv2yhT
Unable to use a TTY - input is not a terminal or the right kind of file
++ rm /tmp/tmp.6K1gvXQPSl /tmp/tmp.HMw2Kv2yhT
++ return 0
+ '[' '' ']'
+ desc 'test missing passwords were created and present in internal secrets'
+ set +o xtrace
-----------------------------------------------------------------------------------
test missing passwords were created and present in internal secrets
-----------------------------------------------------------------------------------
+ empty_pwds=()
+ wrong_pwds=()
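The block that follows iterates over the six system users and, for each one, checks that the password in the user-facing Secret (my-cluster-secrets) matches the operator's internal copy (internal-some-name), then actually logs in with it. Reduced to its skeleton (a reconstruction from the trace, using the empty_pwds/wrong_pwds arrays initialized above; the skipped-users test the trace shows as "[[ '' =~ $user ]]" is omitted):

for user in root xtrabackup monitor proxyadmin operator replication; do
    echo "Checking $user"
    secret_pass=$(getSecretData my-cluster-secrets "$user")
    int_secret_pass=$(getSecretData internal-some-name "$user")
    [[ -z $int_secret_pass ]] && empty_pwds+=("$user")
    [[ $int_secret_pass != "$secret_pass" ]] && wrong_pwds+=("$user")
    echo "Running compare for $user"
    if [[ $user == proxyadmin ]]; then
        # proxyadmin is a ProxySQL admin account: verified on the admin port
        # (6032) inside the proxysql container rather than through MySQL
        compare_mysql_cmd_local select-2 'SHOW TABLES;' \
            "-h127.0.0.1 -P6032 -uproxyadmin -p'$secret_pass'" some-name-proxysql-0 '' proxysql
    else
        compare_mysql_cmd select-4 'SHOW TABLES;' "-h some-name-proxysql -u$user -p'$secret_pass'"
    fi
done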
-uroot -p'\''[7rwyDU[~.?Hed@w'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''[7rwyDU[~.?Hed@w'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oTvOoz6Dyo +++ mktemp ++ local LAST_ERR=/tmp/tmp.hkQVYll0Wa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oTvOoz6Dyo ++ cat /tmp/tmp.hkQVYll0Wa ++ rm /tmp/tmp.oTvOoz6Dyo /tmp/tmp.hkQVYll0Wa ++ return 0 + client_pod=pxc-client-857d976497-jk7xs + wait_pod pxc-client-857d976497-jk7xs + local pod=pxc-client-857d976497-jk7xs + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-jk7xs ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-857d976497-jk7xs condition met waiting for pod/pxc-client-857d976497-jk7xs to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.vQXhoSoFAX/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4.sql /tmp/tmp.vQXhoSoFAX/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking xtrabackup' Checking xtrabackup ++ getSecretData my-cluster-secrets xtrabackup ++ local secretName=my-cluster-secrets ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.WvOjCoORTI +++ mktemp ++ local LAST_ERR=/tmp/tmp.gx9BaOa4D5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WvOjCoORTI ++ cat /tmp/tmp.gx9BaOa4D5 ++ rm /tmp/tmp.WvOjCoORTI /tmp/tmp.gx9BaOa4D5 ++ return 0 + secret_pass='qWpi3r)nV&Ox+nhR7' ++ getSecretData internal-some-name xtrabackup ++ local secretName=internal-some-name ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.OrV0L6CTUz +++ mktemp ++ local LAST_ERR=/tmp/tmp.h6gpgpyP0f ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OrV0L6CTUz ++ cat /tmp/tmp.h6gpgpyP0f ++ rm /tmp/tmp.OrV0L6CTUz /tmp/tmp.h6gpgpyP0f ++ return 0 + int_secret_pass='qWpi3r)nV&Ox+nhR7' + [[ -z qWpi3r)nV&Ox+nhR7 ]] + [[ qWpi3r)nV&Ox+nhR7 != \q\W\p\i\3\r\)\n\V\&\O\x\+\n\h\R\7 ]] + [[ xtrabackup != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ xtrabackup ]] + [[ '' =~ xtrabackup ]] + echo 'Running compare for xtrabackup' Running compare for xtrabackup + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''qWpi3r)nV&Ox+nhR7'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''qWpi3r)nV&Ox+nhR7'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''qWpi3r)nV&Ox+nhR7'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''qWpi3r)nV&Ox+nhR7'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.k6prdDEqeT +++ mktemp ++ local LAST_ERR=/tmp/tmp.jXPjSQfriW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.k6prdDEqeT ++ cat /tmp/tmp.jXPjSQfriW ++ rm /tmp/tmp.k6prdDEqeT /tmp/tmp.jXPjSQfriW ++ return 0 + client_pod=pxc-client-857d976497-jk7xs + wait_pod pxc-client-857d976497-jk7xs + local pod=pxc-client-857d976497-jk7xs + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-jk7xs ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-jk7xs condition met waiting for pod/pxc-client-857d976497-jk7xs to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.vQXhoSoFAX/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4.sql /tmp/tmp.vQXhoSoFAX/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking monitor' Checking monitor ++ getSecretData my-cluster-secrets monitor ++ local secretName=my-cluster-secrets ++ local dataKey=monitor ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.monitor}}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kaG9oQYDtc +++ mktemp ++ base64 --decode ++ local LAST_ERR=/tmp/tmp.8SdKdqiAqC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kaG9oQYDtc ++ cat /tmp/tmp.8SdKdqiAqC ++ rm /tmp/tmp.kaG9oQYDtc /tmp/tmp.8SdKdqiAqC ++ return 0 + secret_pass=monitor_password ++ getSecretData internal-some-name monitor ++ local secretName=internal-some-name ++ local dataKey=monitor ++ kubectl_bin get secrets/internal-some-name '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.PGghr1FoxS +++ mktemp ++ local LAST_ERR=/tmp/tmp.xtE7C14yVj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PGghr1FoxS ++ cat /tmp/tmp.xtE7C14yVj ++ rm /tmp/tmp.PGghr1FoxS /tmp/tmp.xtE7C14yVj ++ return 0 + int_secret_pass=monitor_password + [[ -z monitor_password ]] + [[ monitor_password != \m\o\n\i\t\o\r\_\p\a\s\s\w\o\r\d ]] + [[ monitor != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ monitor ]] + [[ '' =~ monitor ]] + echo 'Running compare for monitor' Running compare for monitor + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local postfix= + local 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Orgy6W6WeM +++ mktemp ++ local LAST_ERR=/tmp/tmp.O0UfFjYaOg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Orgy6W6WeM ++ cat /tmp/tmp.O0UfFjYaOg ++ rm /tmp/tmp.Orgy6W6WeM /tmp/tmp.O0UfFjYaOg ++ return 0 + client_pod=pxc-client-857d976497-jk7xs + wait_pod pxc-client-857d976497-jk7xs + local pod=pxc-client-857d976497-jk7xs + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-jk7xs ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-jk7xs condition met waiting for pod/pxc-client-857d976497-jk7xs to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.vQXhoSoFAX/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4.sql /tmp/tmp.vQXhoSoFAX/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking proxyadmin' Checking proxyadmin ++ getSecretData my-cluster-secrets proxyadmin ++ local secretName=my-cluster-secrets ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.m8iBTxc69r +++ mktemp ++ local LAST_ERR=/tmp/tmp.J8yWd949fy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.m8iBTxc69r ++ cat /tmp/tmp.J8yWd949fy ++ rm /tmp/tmp.m8iBTxc69r /tmp/tmp.J8yWd949fy ++ return 0 + secret_pass='IplOPmoj,i4{m=D32' ++ getSecretData internal-some-name proxyadmin ++ local secretName=internal-some-name ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/internal-some-name '--template={{.data.proxyadmin}}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jIlPJVFnnF +++ mktemp ++ local LAST_ERR=/tmp/tmp.29iAhXUiyp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ base64 --decode ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jIlPJVFnnF ++ cat /tmp/tmp.29iAhXUiyp ++ rm /tmp/tmp.jIlPJVFnnF /tmp/tmp.29iAhXUiyp ++ return 0 + int_secret_pass='IplOPmoj,i4{m=D32' + [[ -z IplOPmoj,i4{m=D32 ]] + [[ IplOPmoj,i4{m=D32 != \I\p\l\O\P\m\o\j\,\i\4\{\m\=\D\3\2 ]] + [[ proxyadmin != \p\r\o\x\y\a\d\m\i\n ]] + [[ proxyadmin == \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ proxyadmin ]] + [[ '' =~ proxyadmin ]] + echo 'Running compare for proxyadmin' Running compare for proxyadmin + 
compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''IplOPmoj,i4{m=D32'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''IplOPmoj,i4{m=D32'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''IplOPmoj,i4{m=D32'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''IplOPmoj,i4{m=D32'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.vQXhoSoFAX/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-2.sql /tmp/tmp.vQXhoSoFAX/select-2.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking operator' Checking operator ++ getSecretData my-cluster-secrets operator ++ local secretName=my-cluster-secrets ++ local dataKey=operator ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.CI0bqgKU5o +++ mktemp ++ local LAST_ERR=/tmp/tmp.RLjNSmORlh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CI0bqgKU5o ++ cat /tmp/tmp.RLjNSmORlh ++ rm /tmp/tmp.CI0bqgKU5o /tmp/tmp.RLjNSmORlh ++ return 0 + secret_pass='-m_-lyNQgyUC!Rgz' ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' +++ mktemp ++ base64 --decode ++ local LAST_OUT=/tmp/tmp.TfFoTgGcnJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.zNQ2qa04Zo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TfFoTgGcnJ ++ cat /tmp/tmp.zNQ2qa04Zo ++ rm /tmp/tmp.TfFoTgGcnJ /tmp/tmp.zNQ2qa04Zo ++ return 0 + int_secret_pass='-m_-lyNQgyUC!Rgz' + [[ -z -m_-lyNQgyUC!Rgz ]] + [[ -m_-lyNQgyUC!Rgz != \-\m\_\-\l\y\N\Q\g\y\U\C\!\R\g\z ]] + [[ operator != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ operator ]] + [[ '' =~ operator ]] + echo 'Running compare for operator' Running compare for operator + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''-m_-lyNQgyUC!Rgz'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''-m_-lyNQgyUC!Rgz'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''-m_-lyNQgyUC!Rgz'\''' + local 
'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''-m_-lyNQgyUC!Rgz'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.r3eNIeYmdy +++ mktemp ++ local LAST_ERR=/tmp/tmp.PEqrxTVvlB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.r3eNIeYmdy ++ cat /tmp/tmp.PEqrxTVvlB ++ rm /tmp/tmp.r3eNIeYmdy /tmp/tmp.PEqrxTVvlB ++ return 0 + client_pod=pxc-client-857d976497-jk7xs + wait_pod pxc-client-857d976497-jk7xs + local pod=pxc-client-857d976497-jk7xs + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-jk7xs ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-jk7xs condition met waiting for pod/pxc-client-857d976497-jk7xs to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.vQXhoSoFAX/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4.sql /tmp/tmp.vQXhoSoFAX/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking replication' Checking replication ++ getSecretData my-cluster-secrets replication ++ local secretName=my-cluster-secrets ++ local dataKey=replication ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.p4ojCAvgWZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.4bngpDmhOY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.p4ojCAvgWZ ++ cat /tmp/tmp.4bngpDmhOY ++ rm /tmp/tmp.p4ojCAvgWZ /tmp/tmp.4bngpDmhOY ++ return 0 + secret_pass='T~)5#D,jJ_P?WBI,U' ++ getSecretData internal-some-name replication ++ local secretName=internal-some-name ++ local dataKey=replication ++ kubectl_bin get secrets/internal-some-name '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.p9qfIyTcUD +++ mktemp ++ local LAST_ERR=/tmp/tmp.qV73gA5f5a ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.p9qfIyTcUD ++ cat /tmp/tmp.qV73gA5f5a ++ rm /tmp/tmp.p9qfIyTcUD /tmp/tmp.qV73gA5f5a ++ return 0 + int_secret_pass='T~)5#D,jJ_P?WBI,U' + [[ -z T~)5#D,jJ_P?WBI,U ]] + [[ T~)5#D,jJ_P?WBI,U != \T\~\)\5\#\D\,\j\J\_\P\?\W\B\I\,\U ]] + [[ replication != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ replication ]] + [[ '' =~ replication ]] + echo 'Running compare for replication' Running compare for replication + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''T~)5#D,jJ_P?WBI,U'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''T~)5#D,jJ_P?WBI,U'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''T~)5#D,jJ_P?WBI,U'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''T~)5#D,jJ_P?WBI,U'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GjxS6tspVq +++ mktemp ++ local LAST_ERR=/tmp/tmp.Pc3wRzNvEG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GjxS6tspVq ++ cat /tmp/tmp.Pc3wRzNvEG ++ rm /tmp/tmp.GjxS6tspVq /tmp/tmp.Pc3wRzNvEG ++ return 0 + client_pod=pxc-client-857d976497-jk7xs + wait_pod pxc-client-857d976497-jk7xs + local pod=pxc-client-857d976497-jk7xs + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-jk7xs ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-jk7xs condition met waiting for pod/pxc-client-857d976497-jk7xs to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.vQXhoSoFAX/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4.sql /tmp/tmp.vQXhoSoFAX/select-4.sql + [[ -n '' ]] + [[ -n '' ]] + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.OG261cF7cm ++ mktemp + local LAST_ERR=/tmp/tmp.8w3IoG2Abz + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.OG261cF7cm secret/my-cluster-secrets patched + cat /tmp/tmp.8w3IoG2Abz + rm /tmp/tmp.OG261cF7cm /tmp/tmp.8w3IoG2Abz + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FisBV4B6HM +++ mktemp ++ local LAST_ERR=/tmp/tmp.BroPgAMPkQ 
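
NOTE: patch_secret is a thin wrapper over kubectl patch, and dGVzdC1wYXNzd29yZA== is simply base64 of test-password; the "test root" step above therefore swaps the root password in the Secret and gives the operator 15 seconds to reconcile before re-running the comparison with the new credentials. Done by hand:

    new_pass=$(echo -n 'test-password' | base64)   # yields dGVzdC1wYXNzd29yZA==
    kubectl patch secret my-cluster-secrets -p "{\"data\":{\"root\": \"$new_pass\"}}"
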
++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FisBV4B6HM ++ cat /tmp/tmp.BroPgAMPkQ ++ rm /tmp/tmp.FisBV4B6HM /tmp/tmp.BroPgAMPkQ ++ return 0 + client_pod=pxc-client-857d976497-jk7xs + wait_pod pxc-client-857d976497-jk7xs + local pod=pxc-client-857d976497-jk7xs + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-jk7xs ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-jk7xs condition met waiting for pod/pxc-client-857d976497-jk7xs to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.vQXhoSoFAX/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4.sql /tmp/tmp.vQXhoSoFAX/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.lsqMzFzcjm ++ mktemp + local LAST_ERR=/tmp/tmp.y4xSmHpyzw + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lsqMzFzcjm perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.y4xSmHpyzw + rm /tmp/tmp.lsqMzFzcjm /tmp/tmp.y4xSmHpyzw + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yTN9ArEE8l +++ mktemp ++ local LAST_ERR=/tmp/tmp.UVxXlCE0zj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yTN9ArEE8l ++ cat /tmp/tmp.UVxXlCE0zj ++ rm /tmp/tmp.yTN9ArEE8l /tmp/tmp.UVxXlCE0zj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Wb8PjVd2Fh +++ mktemp ++ local LAST_ERR=/tmp/tmp.0NDvitHR8v ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Wb8PjVd2Fh ++ cat /tmp/tmp.0NDvitHR8v ++ rm /tmp/tmp.Wb8PjVd2Fh /tmp/tmp.0NDvitHR8v ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.E4FI4X2AlQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.uxzS1P9wru ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.E4FI4X2AlQ ++ cat /tmp/tmp.uxzS1P9wru ++ rm /tmp/tmp.E4FI4X2AlQ /tmp/tmp.uxzS1P9wru ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Zgx4uxDV6C ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.UotuUw2ppl +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Zgx4uxDV6C +++++ cat /tmp/tmp.UotuUw2ppl +++++ rm /tmp/tmp.Zgx4uxDV6C /tmp/tmp.UotuUw2ppl +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.COLCZXGbVt ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.r5Ctx9HMec +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.COLCZXGbVt +++++ cat /tmp/tmp.r5Ctx9HMec +++++ rm /tmp/tmp.COLCZXGbVt /tmp/tmp.r5Ctx9HMec +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VjlpFDatMh +++ mktemp ++ local LAST_ERR=/tmp/tmp.Uqt1vcdGrj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VjlpFDatMh ++ cat /tmp/tmp.Uqt1vcdGrj ++ rm /tmp/tmp.VjlpFDatMh /tmp/tmp.Uqt1vcdGrj ++ return 0 + [[ 3 == \3 ]] + echo + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.brLNYrhnQu ++ mktemp + local LAST_ERR=/tmp/tmp.xI7o3qrwrm + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.brLNYrhnQu secret/my-cluster-secrets patched + cat /tmp/tmp.xI7o3qrwrm + rm /tmp/tmp.brLNYrhnQu /tmp/tmp.xI7o3qrwrm + return 0 + 
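
NOTE: the wait_cluster_consistency block above polls .status.state every 5 seconds (up to 300 iterations) until the custom resource reports ready, then additionally requires .status.pxc.ready and the proxy's ready count to match the requested sizes; get_proxy_engine picks between the two proxy status paths by reading spec.haproxy.enabled and spec.proxysql.enabled. Stripped of the tmp-file and retry plumbing, the wait amounts to this sketch:

    # simplified wait_cluster_consistency some-name 3 3 (iteration cap omitted)
    until [ "$(kubectl get pxc some-name -o jsonpath='{.status.state}')" = "ready" ]; do
        sleep 5
    done
    [ "$(kubectl get pxc some-name -o jsonpath='{.status.pxc.ready}')" = "3" ]
    [ "$(kubectl get pxc some-name -o jsonpath='{.status.proxysql.ready}')" = "3" ]
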
sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YYq327KCJN +++ mktemp ++ local LAST_ERR=/tmp/tmp.AQNyHY5ko3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YYq327KCJN ++ cat /tmp/tmp.AQNyHY5ko3 ++ rm /tmp/tmp.YYq327KCJN /tmp/tmp.AQNyHY5ko3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VtRUm9pKki +++ mktemp ++ local LAST_ERR=/tmp/tmp.XMhQrBuZmm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VtRUm9pKki ++ cat /tmp/tmp.XMhQrBuZmm ++ rm /tmp/tmp.VtRUm9pKki /tmp/tmp.XMhQrBuZmm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zWVk3ELDqB +++ mktemp ++ local LAST_ERR=/tmp/tmp.3MG5DncZl8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zWVk3ELDqB ++ cat /tmp/tmp.3MG5DncZl8 ++ rm /tmp/tmp.zWVk3ELDqB /tmp/tmp.3MG5DncZl8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.piChv3a68a +++ mktemp ++ local LAST_ERR=/tmp/tmp.n4xBouzFLc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.piChv3a68a ++ cat /tmp/tmp.n4xBouzFLc ++ rm /tmp/tmp.piChv3a68a /tmp/tmp.n4xBouzFLc ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MDhXIpEF5L +++ mktemp ++ local LAST_ERR=/tmp/tmp.R82caiWMh7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MDhXIpEF5L ++ cat /tmp/tmp.R82caiWMh7 ++ rm /tmp/tmp.MDhXIpEF5L /tmp/tmp.R82caiWMh7 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.64RzpsdRVw ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.P702UZLORU +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.64RzpsdRVw +++++ cat /tmp/tmp.P702UZLORU +++++ rm /tmp/tmp.64RzpsdRVw /tmp/tmp.P702UZLORU +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.KVx9P9JeeQ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.vXruxEuQhZ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.KVx9P9JeeQ +++++ cat /tmp/tmp.vXruxEuQhZ +++++ rm /tmp/tmp.KVx9P9JeeQ /tmp/tmp.vXruxEuQhZ +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.s1ZxmJMrym +++ mktemp ++ local LAST_ERR=/tmp/tmp.sIRr6daPR5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.s1ZxmJMrym ++ cat /tmp/tmp.sIRr6daPR5 ++ rm /tmp/tmp.s1ZxmJMrym /tmp/tmp.sIRr6daPR5 ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' 
some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.vQXhoSoFAX/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-2.sql /tmp/tmp.vQXhoSoFAX/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.vQXhoSoFAX/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-2.sql /tmp/tmp.vQXhoSoFAX/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
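
NOTE: after the proxyadmin rotation the admin login is re-checked on each ProxySQL replica individually (some-name-proxysql-0, -1 and -2), since the admin credential lives in every pod rather than behind the shared service. The three compare_mysql_cmd_local calls are roughly equivalent to (exec invocation approximated):

    for i in 0 1 2; do
        kubectl exec some-name-proxysql-$i -c proxysql -- \
            mysql -h127.0.0.1 -P6032 -uproxyadmin -ptest-password -e 'SHOW TABLES;'
    done
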
-s /tmp/tmp.vQXhoSoFAX/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-2.sql /tmp/tmp.vQXhoSoFAX/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.61jvFFuHag ++ mktemp + local LAST_ERR=/tmp/tmp.4gFLEvwEZ4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.61jvFFuHag perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.4gFLEvwEZ4 + rm /tmp/tmp.61jvFFuHag /tmp/tmp.4gFLEvwEZ4 + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.KhDEMzdIJ3 ++ mktemp + local LAST_ERR=/tmp/tmp.H6sASavoG7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.KhDEMzdIJ3 secret/my-cluster-secrets patched + cat /tmp/tmp.H6sASavoG7 + rm /tmp/tmp.KhDEMzdIJ3 /tmp/tmp.H6sASavoG7 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BdtekUxl43 +++ mktemp ++ local LAST_ERR=/tmp/tmp.LkfDK587KZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BdtekUxl43 ++ cat /tmp/tmp.LkfDK587KZ ++ rm /tmp/tmp.BdtekUxl43 /tmp/tmp.LkfDK587KZ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WHNFkKI7qg +++ mktemp ++ local LAST_ERR=/tmp/tmp.frqZgy7cH3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WHNFkKI7qg ++ cat /tmp/tmp.frqZgy7cH3 ++ rm /tmp/tmp.WHNFkKI7qg /tmp/tmp.frqZgy7cH3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
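
NOTE: the xtrabackup case doubles as a resize test: ProxySQL is scaled from 3 back to 2 in the same reconcile window as the password rotation, which is why the wait loop below needs roughly 22 polls (about two minutes) to get back to ready. The two patches issued above are, verbatim:

    kubectl patch pxc some-name --type=merge -p '{"spec":{"proxysql":{"size":2}}}'
    kubectl patch secret my-cluster-secrets -p '{"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}'
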
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AB9k4zjWJy +++ mktemp ++ local LAST_ERR=/tmp/tmp.LATc59iaNf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AB9k4zjWJy ++ cat /tmp/tmp.LATc59iaNf ++ rm /tmp/tmp.AB9k4zjWJy /tmp/tmp.LATc59iaNf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wJkPSmQ7Pt +++ mktemp ++ local LAST_ERR=/tmp/tmp.YbTzBiaRpH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wJkPSmQ7Pt ++ cat /tmp/tmp.YbTzBiaRpH ++ rm /tmp/tmp.wJkPSmQ7Pt /tmp/tmp.YbTzBiaRpH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oJiVuvaeay +++ mktemp ++ local LAST_ERR=/tmp/tmp.BGh3VO5mE7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oJiVuvaeay ++ cat /tmp/tmp.BGh3VO5mE7 ++ rm /tmp/tmp.oJiVuvaeay /tmp/tmp.BGh3VO5mE7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6Q0qgBTRXM +++ mktemp ++ local LAST_ERR=/tmp/tmp.zrKSo40eFH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6Q0qgBTRXM ++ cat /tmp/tmp.zrKSo40eFH ++ rm /tmp/tmp.6Q0qgBTRXM /tmp/tmp.zrKSo40eFH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e9BQ9Gqs1M +++ mktemp ++ local LAST_ERR=/tmp/tmp.1JYwsXXWYi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.e9BQ9Gqs1M ++ cat /tmp/tmp.1JYwsXXWYi ++ rm /tmp/tmp.e9BQ9Gqs1M /tmp/tmp.1JYwsXXWYi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ukuamd2SVg +++ mktemp ++ local LAST_ERR=/tmp/tmp.OpsYAzCsNK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ukuamd2SVg ++ cat /tmp/tmp.OpsYAzCsNK ++ rm /tmp/tmp.ukuamd2SVg /tmp/tmp.OpsYAzCsNK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6OrU0bz5iS +++ mktemp ++ local LAST_ERR=/tmp/tmp.rYVwghk7UO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6OrU0bz5iS ++ cat /tmp/tmp.rYVwghk7UO ++ rm /tmp/tmp.6OrU0bz5iS /tmp/tmp.rYVwghk7UO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.282pF917eF +++ mktemp ++ local LAST_ERR=/tmp/tmp.G74MrqZEzT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.282pF917eF ++ cat /tmp/tmp.G74MrqZEzT ++ rm /tmp/tmp.282pF917eF /tmp/tmp.G74MrqZEzT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hMHolkn12a +++ mktemp ++ local LAST_ERR=/tmp/tmp.XMHhvhEDVy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hMHolkn12a ++ cat /tmp/tmp.XMHhvhEDVy ++ rm /tmp/tmp.hMHolkn12a /tmp/tmp.XMHhvhEDVy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0IbRJeoEcs +++ mktemp ++ local LAST_ERR=/tmp/tmp.snsHEUYOhv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0IbRJeoEcs ++ cat /tmp/tmp.snsHEUYOhv ++ rm /tmp/tmp.0IbRJeoEcs /tmp/tmp.snsHEUYOhv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.x4AUkv6o1w +++ mktemp ++ local LAST_ERR=/tmp/tmp.uQbfiE38u6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.x4AUkv6o1w ++ cat /tmp/tmp.uQbfiE38u6 ++ rm /tmp/tmp.x4AUkv6o1w /tmp/tmp.uQbfiE38u6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UiZzBE8JsZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.iztrJG9gs2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UiZzBE8JsZ ++ cat /tmp/tmp.iztrJG9gs2 ++ rm /tmp/tmp.UiZzBE8JsZ /tmp/tmp.iztrJG9gs2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KT4eGEDW92 +++ mktemp ++ local LAST_ERR=/tmp/tmp.8G6hnrNDqB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KT4eGEDW92 ++ cat /tmp/tmp.8G6hnrNDqB ++ rm /tmp/tmp.KT4eGEDW92 /tmp/tmp.8G6hnrNDqB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.auOYoKpola +++ mktemp ++ local LAST_ERR=/tmp/tmp.2keDWfEGCG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.auOYoKpola ++ cat /tmp/tmp.2keDWfEGCG ++ rm /tmp/tmp.auOYoKpola /tmp/tmp.2keDWfEGCG ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J21m4hb3Bt +++ mktemp ++ local LAST_ERR=/tmp/tmp.lcyUOpUdd0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.J21m4hb3Bt ++ cat /tmp/tmp.lcyUOpUdd0 ++ rm /tmp/tmp.J21m4hb3Bt /tmp/tmp.lcyUOpUdd0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DjPx9v0XJT +++ mktemp ++ local LAST_ERR=/tmp/tmp.OofXvGPN6e ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DjPx9v0XJT ++ cat /tmp/tmp.OofXvGPN6e ++ rm /tmp/tmp.DjPx9v0XJT /tmp/tmp.OofXvGPN6e ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5z1aLHjEt5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.T76zP6FXub ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5z1aLHjEt5 ++ cat /tmp/tmp.T76zP6FXub ++ rm /tmp/tmp.5z1aLHjEt5 /tmp/tmp.T76zP6FXub ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Kp6QmMwGM0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.dnAv7mjptV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Kp6QmMwGM0 ++ cat /tmp/tmp.dnAv7mjptV ++ rm /tmp/tmp.Kp6QmMwGM0 /tmp/tmp.dnAv7mjptV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NgktHeTnxe +++ mktemp ++ local LAST_ERR=/tmp/tmp.hdcyz7hcYb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NgktHeTnxe ++ cat /tmp/tmp.hdcyz7hcYb ++ rm /tmp/tmp.NgktHeTnxe /tmp/tmp.hdcyz7hcYb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.C2vffWWwNU +++ mktemp ++ local LAST_ERR=/tmp/tmp.TLU81VKLKG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.C2vffWWwNU ++ cat /tmp/tmp.TLU81VKLKG ++ rm /tmp/tmp.C2vffWWwNU /tmp/tmp.TLU81VKLKG ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8MjXtZBrdo +++ mktemp ++ local LAST_ERR=/tmp/tmp.WtLM9IjjiF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8MjXtZBrdo ++ cat /tmp/tmp.WtLM9IjjiF ++ rm /tmp/tmp.8MjXtZBrdo /tmp/tmp.WtLM9IjjiF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 22 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.F6glqLiCEb +++ mktemp ++ local LAST_ERR=/tmp/tmp.R3I8thdhWY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.F6glqLiCEb ++ cat /tmp/tmp.R3I8thdhWY ++ rm /tmp/tmp.F6glqLiCEb /tmp/tmp.R3I8thdhWY ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HVfWtLHzUq +++ mktemp ++ local LAST_ERR=/tmp/tmp.cf3jNX7GMq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HVfWtLHzUq ++ cat /tmp/tmp.cf3jNX7GMq ++ rm /tmp/tmp.HVfWtLHzUq /tmp/tmp.cf3jNX7GMq ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.MvIN3xPPRR ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.EoNXeNcz9b +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.MvIN3xPPRR +++++ cat /tmp/tmp.EoNXeNcz9b +++++ rm /tmp/tmp.MvIN3xPPRR /tmp/tmp.EoNXeNcz9b +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.XsVtMZ4kad ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ZXxoW4IIfH +++++ local exit_status=0 ++++++ seq 0 2 
+++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.XsVtMZ4kad +++++ cat /tmp/tmp.ZXxoW4IIfH +++++ rm /tmp/tmp.XsVtMZ4kad /tmp/tmp.ZXxoW4IIfH +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Xc2xJf5Ipa +++ mktemp ++ local LAST_ERR=/tmp/tmp.NySakniXDF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Xc2xJf5Ipa ++ cat /tmp/tmp.NySakniXDF ++ rm /tmp/tmp.Xc2xJf5Ipa /tmp/tmp.NySakniXDF ++ return 0 + [[ 2 == \2 ]] + echo + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' 
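
NOTE: unlike the other users, xtrabackup is verified with SHOW DATABASES against 127.0.0.1 inside the some-name-pxc-0 pod (container pxc) instead of through some-name-proxysql, consistent with the account being scoped to local connections on the PXC nodes. Roughly:

    kubectl exec some-name-pxc-0 -c pxc -- \
        mysql -h 127.0.0.1 -uxtrabackup -ptest-password -e 'SHOW DATABASES;'
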
-s /tmp/tmp.vQXhoSoFAX/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-3.sql /tmp/tmp.vQXhoSoFAX/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.zsa0qG1aNl ++ mktemp + local LAST_ERR=/tmp/tmp.hNMfTt0iqD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zsa0qG1aNl secret/my-cluster-secrets patched + cat /tmp/tmp.hNMfTt0iqD + rm /tmp/tmp.zsa0qG1aNl /tmp/tmp.hNMfTt0iqD + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ base64 --decode ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TL9qZwBA0E +++ mktemp ++ local LAST_ERR=/tmp/tmp.6WI9umR6BC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TL9qZwBA0E ++ cat /tmp/tmp.6WI9umR6BC ++ rm /tmp/tmp.TL9qZwBA0E /tmp/tmp.6WI9umR6BC ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + echo 'Skipping dual password feature doesn'\''t work for 5.7. PXC 5.7 doesn'\''t support it!' Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it! + return + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Jwb1YFOBhh +++ mktemp ++ local LAST_ERR=/tmp/tmp.oxWpkMKPtf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Jwb1YFOBhh ++ cat /tmp/tmp.oxWpkMKPtf ++ rm /tmp/tmp.Jwb1YFOBhh /tmp/tmp.oxWpkMKPtf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
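
NOTE: wait_for_password_propagation is effectively a no-op on this image: dual passwords (keeping the old credential valid while the new one propagates) were introduced in MySQL 8.0.14, so on a 5.7 build the helper logs the skip above and returns immediately. On an 8.0 image the underlying server feature is the RETAIN CURRENT PASSWORD clause; purely as an illustration, not executed in this 5.7 run:

    # 8.0.14+ only: rotate monitor while keeping the old password temporarily valid
    kubectl exec some-name-pxc-0 -c pxc -- mysql -uroot -p"$root_pass" \
        -e "ALTER USER 'monitor'@'%' IDENTIFIED BY 'new-password' RETAIN CURRENT PASSWORD"
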
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7OC5pfmyWE +++ mktemp ++ local LAST_ERR=/tmp/tmp.kj5ChjT4R2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7OC5pfmyWE ++ cat /tmp/tmp.kj5ChjT4R2 ++ rm /tmp/tmp.7OC5pfmyWE /tmp/tmp.kj5ChjT4R2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OBDVhxgjoV +++ mktemp ++ local LAST_ERR=/tmp/tmp.bL8S6LSjNM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OBDVhxgjoV ++ cat /tmp/tmp.bL8S6LSjNM ++ rm /tmp/tmp.OBDVhxgjoV /tmp/tmp.bL8S6LSjNM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7P2NDhrWH6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.YLrH8Go4so ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7P2NDhrWH6 ++ cat /tmp/tmp.YLrH8Go4so ++ rm /tmp/tmp.7P2NDhrWH6 /tmp/tmp.YLrH8Go4so ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Xw5KS9fL1i +++ mktemp ++ local LAST_ERR=/tmp/tmp.H44oMLZDip ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Xw5KS9fL1i ++ cat /tmp/tmp.H44oMLZDip ++ rm /tmp/tmp.Xw5KS9fL1i /tmp/tmp.H44oMLZDip ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AD2j7mbKc8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.yXg8FaZhTN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AD2j7mbKc8 ++ cat /tmp/tmp.yXg8FaZhTN ++ rm /tmp/tmp.AD2j7mbKc8 /tmp/tmp.yXg8FaZhTN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wc8szbOKaL +++ mktemp ++ local LAST_ERR=/tmp/tmp.2zrnsYaUXN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wc8szbOKaL ++ cat /tmp/tmp.2zrnsYaUXN ++ rm /tmp/tmp.wc8szbOKaL /tmp/tmp.2zrnsYaUXN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yeEt4J56yb +++ mktemp ++ local LAST_ERR=/tmp/tmp.HtCFHidnBN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yeEt4J56yb ++ cat /tmp/tmp.HtCFHidnBN ++ rm /tmp/tmp.yeEt4J56yb /tmp/tmp.HtCFHidnBN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XhqxH9wrQx +++ mktemp ++ local LAST_ERR=/tmp/tmp.1jjlwzmhkU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XhqxH9wrQx ++ cat /tmp/tmp.1jjlwzmhkU ++ rm /tmp/tmp.XhqxH9wrQx /tmp/tmp.1jjlwzmhkU ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ovuWkl08Rg +++ mktemp ++ local LAST_ERR=/tmp/tmp.26WGLNFJhH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ovuWkl08Rg ++ cat /tmp/tmp.26WGLNFJhH ++ rm /tmp/tmp.ovuWkl08Rg /tmp/tmp.26WGLNFJhH ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.mH6ACHOkht ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.D0SOHUKx0H +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.mH6ACHOkht +++++ cat /tmp/tmp.D0SOHUKx0H +++++ rm /tmp/tmp.mH6ACHOkht /tmp/tmp.D0SOHUKx0H +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.s8ya3Yvwbe ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.S32bZDIejO +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.s8ya3Yvwbe +++++ cat /tmp/tmp.S32bZDIejO +++++ rm /tmp/tmp.s8ya3Yvwbe /tmp/tmp.S32bZDIejO +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qnThhJYQmI +++ mktemp ++ local LAST_ERR=/tmp/tmp.cNUUKVRFDY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qnThhJYQmI ++ cat /tmp/tmp.cNUUKVRFDY ++ rm /tmp/tmp.qnThhJYQmI /tmp/tmp.cNUUKVRFDY ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW 
TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mSmz8xi4ZR +++ mktemp ++ local LAST_ERR=/tmp/tmp.N78zesD8lZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mSmz8xi4ZR ++ cat /tmp/tmp.N78zesD8lZ ++ rm /tmp/tmp.mSmz8xi4ZR /tmp/tmp.N78zesD8lZ ++ return 0 + client_pod=pxc-client-857d976497-jk7xs + wait_pod pxc-client-857d976497-jk7xs + local pod=pxc-client-857d976497-jk7xs + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-jk7xs ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-jk7xs condition met waiting for pod/pxc-client-857d976497-jk7xs to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.vQXhoSoFAX/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4.sql /tmp/tmp.vQXhoSoFAX/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.GNcouvpDe6 ++ mktemp + local LAST_ERR=/tmp/tmp.xWTAHgdhEZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.GNcouvpDe6 secret/my-cluster-secrets patched + cat /tmp/tmp.xWTAHgdhEZ + rm /tmp/tmp.GNcouvpDe6 /tmp/tmp.xWTAHgdhEZ + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fI7yCy84tV +++ mktemp ++ local LAST_ERR=/tmp/tmp.aif9vGp4v9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 
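
NOTE: for the monitor check just completed (and the operator one that follows) the test adds an extra sleep 10 after the cluster reports ready, presumably to give ProxySQL time to re-sync its mysql_users runtime table before logging in with the rotated password. One way to eyeball that sync by hand (a hypothetical spot-check, not part of the test):

    kubectl exec some-name-proxysql-0 -c proxysql -- \
        mysql -h127.0.0.1 -P6032 -uproxyadmin -ptest-password \
        -e 'SELECT username, active FROM mysql_users;'
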
2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fI7yCy84tV ++ cat /tmp/tmp.aif9vGp4v9 ++ rm /tmp/tmp.fI7yCy84tV /tmp/tmp.aif9vGp4v9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.864tntO05m +++ mktemp ++ local LAST_ERR=/tmp/tmp.lRdFZvLK5u ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.864tntO05m ++ cat /tmp/tmp.lRdFZvLK5u ++ rm /tmp/tmp.864tntO05m /tmp/tmp.lRdFZvLK5u ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BX0z2Meo2B +++ mktemp ++ local LAST_ERR=/tmp/tmp.Bn5kgKV1pY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BX0z2Meo2B ++ cat /tmp/tmp.Bn5kgKV1pY ++ rm /tmp/tmp.BX0z2Meo2B /tmp/tmp.Bn5kgKV1pY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wVRc3ryzl0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.WKtdepaLRI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wVRc3ryzl0 ++ cat /tmp/tmp.WKtdepaLRI ++ rm /tmp/tmp.wVRc3ryzl0 /tmp/tmp.WKtdepaLRI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PQ8xd8jOcW +++ mktemp ++ local LAST_ERR=/tmp/tmp.IptZtLW1cT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PQ8xd8jOcW ++ cat /tmp/tmp.IptZtLW1cT ++ rm /tmp/tmp.PQ8xd8jOcW /tmp/tmp.IptZtLW1cT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ARmlkBhbd7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.niTwcHsX3J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ARmlkBhbd7 ++ cat /tmp/tmp.niTwcHsX3J ++ rm /tmp/tmp.ARmlkBhbd7 /tmp/tmp.niTwcHsX3J ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tbEd4zp5Z2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.NplxbeHXnR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tbEd4zp5Z2 ++ cat /tmp/tmp.NplxbeHXnR ++ rm /tmp/tmp.tbEd4zp5Z2 /tmp/tmp.NplxbeHXnR ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.J8xCxnSzZk ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.fa8n4MeaWh +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.J8xCxnSzZk +++++ cat /tmp/tmp.fa8n4MeaWh +++++ rm /tmp/tmp.J8xCxnSzZk /tmp/tmp.fa8n4MeaWh +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.2LJiqRvGSG ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.49SL7dJeBe +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.2LJiqRvGSG +++++ cat /tmp/tmp.49SL7dJeBe +++++ rm /tmp/tmp.2LJiqRvGSG /tmp/tmp.49SL7dJeBe +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TEYQqYfAoZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.yqhLKhnMuj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TEYQqYfAoZ ++ cat /tmp/tmp.yqhLKhnMuj ++ rm /tmp/tmp.TEYQqYfAoZ /tmp/tmp.yqhLKhnMuj ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW 
TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yDwmYi2aCt +++ mktemp ++ local LAST_ERR=/tmp/tmp.JbhiFePRfV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yDwmYi2aCt ++ cat /tmp/tmp.JbhiFePRfV ++ rm /tmp/tmp.yDwmYi2aCt /tmp/tmp.JbhiFePRfV ++ return 0 + client_pod=pxc-client-857d976497-jk7xs + wait_pod pxc-client-857d976497-jk7xs + local pod=pxc-client-857d976497-jk7xs + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-jk7xs ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-jk7xs condition met waiting for pod/pxc-client-857d976497-jk7xs to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.vQXhoSoFAX/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4.sql /tmp/tmp.vQXhoSoFAX/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.kn1eacwZFv ++ mktemp + local LAST_ERR=/tmp/tmp.Njf7GT092m + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.kn1eacwZFv perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.Njf7GT092m + rm /tmp/tmp.kn1eacwZFv /tmp/tmp.Njf7GT092m + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1Ojoz5WLpX +++ mktemp ++ local LAST_ERR=/tmp/tmp.pvYtiyTYBV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1Ojoz5WLpX ++ cat /tmp/tmp.pvYtiyTYBV ++ rm /tmp/tmp.1Ojoz5WLpX /tmp/tmp.pvYtiyTYBV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
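run_mysql, as traced above, resolves the shared pxc-client pod, waits for it to be Ready (the "condition met" line), then runs the statement inside it; the actual mysql call is hidden by set +o xtrace. A sketch consistent with the visible parts; the exec invocation and mysql flags are assumptions:

run_mysql() {
	local command=$1
	local uri=$2
	local client_pod
	client_pod=$(kubectl get pods --selector=name=pxc-client \
		-o 'jsonpath={.items[].metadata.name}')
	kubectl wait --for=condition=Ready "pod/$client_pod" --timeout=480s
	# Hidden by 'set +o xtrace' in the log; command and flags are assumptions.
	kubectl exec "$client_pod" -- bash -c "echo \"$command\" | mysql -sN $uri"
}

The merge patch just before this ('{"spec": {"secretsName":"my-cluster-secrets-2"}}') points the CR at a new Secrets object, which is why the cluster drops back to initializing and the 5-second poll below starts over.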
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.teTavppNNg +++ mktemp ++ local LAST_ERR=/tmp/tmp.MzGjSnqQfw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.teTavppNNg ++ cat /tmp/tmp.MzGjSnqQfw ++ rm /tmp/tmp.teTavppNNg /tmp/tmp.MzGjSnqQfw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n .
[ .. iterations 1-17 of this 5-second poll elided: identical apart from the loop counter and the mktemp file names; the state stays initializing throughout .. ]
+ echo -n .
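All of the dotted waiting above (and below) is wait_cluster_consistency: poll .status.state every 5 s for at most 300 iterations, then assert the ready counts. Reconstructed from the trace; the timeout branch is never taken in this run, so its handling here is an assumption:

wait_cluster_consistency() {
	local cluster_name=$1
	local cluster_size=$2
	local proxy_size=$3
	local i=0
	local max=300
	sleep 7
	echo -n "waiting for pxc/$cluster_name to be ready"
	until [[ $(kubectl_bin get pxc "$cluster_name" -o 'jsonpath={.status.state}') == "ready" ]]; do
		echo -n .
		sleep 5
		if [[ $i -ge $max ]]; then
			echo "timeout waiting for pxc/$cluster_name"   # assumption: never reached in this log
			exit 1
		fi
		let i+=1
	done
	# Once ready, the pod counts must match what the test expects.
	[[ $(kubectl_bin get pxc "$cluster_name" -o 'jsonpath={.status.pxc.ready}') == "$cluster_size" ]]
	local proxy
	proxy=$(get_proxy_engine "$cluster_name")
	[[ $(kubectl_bin get pxc "$cluster_name" -o "jsonpath={.status.$proxy.ready}") == "$proxy_size" ]]
}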
.+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VDcKvtw5N6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.8xAHPOHbgo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VDcKvtw5N6 ++ cat /tmp/tmp.8xAHPOHbgo ++ rm /tmp/tmp.VDcKvtw5N6 /tmp/tmp.8xAHPOHbgo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4NvBxCfcLX +++ mktemp ++ local LAST_ERR=/tmp/tmp.TTy0OEfvzz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4NvBxCfcLX ++ cat /tmp/tmp.TTy0OEfvzz ++ rm /tmp/tmp.4NvBxCfcLX /tmp/tmp.TTy0OEfvzz ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ij1EUI54l3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.bS05cLmyVq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ij1EUI54l3 ++ cat /tmp/tmp.bS05cLmyVq ++ rm /tmp/tmp.ij1EUI54l3 /tmp/tmp.bS05cLmyVq ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.TvLEDnWP3P ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.y0BlzWQwzM +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.TvLEDnWP3P +++++ cat /tmp/tmp.y0BlzWQwzM +++++ rm /tmp/tmp.TvLEDnWP3P /tmp/tmp.y0BlzWQwzM +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Mo5c9PHq0o ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.vRDTkJP5qi +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Mo5c9PHq0o +++++ cat /tmp/tmp.vRDTkJP5qi +++++ rm /tmp/tmp.Mo5c9PHq0o /tmp/tmp.vRDTkJP5qi +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ElnTrNJO9b +++ mktemp ++ local LAST_ERR=/tmp/tmp.g2yBq8tXCf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ElnTrNJO9b ++ cat /tmp/tmp.g2yBq8tXCf ++ rm /tmp/tmp.ElnTrNJO9b /tmp/tmp.g2yBq8tXCf ++ return 0 + [[ 2 == \2 ]] + echo + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator 
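The deeply nested +++++ blocks above are the proxy-engine probe: HAProxy takes precedence when .spec.haproxy.enabled is true, otherwise ProxySQL. At this point the cluster still runs ProxySQL (enabled: true, 2 pods ready). A sketch of the probe as traced:

get_proxy_engine() {
	local cluster_name=$1
	if [[ $(kubectl_bin get pxc "$cluster_name" -o 'jsonpath={.spec.haproxy.enabled}') == "true" ]]; then
		echo haproxy
	elif [[ $(kubectl_bin get pxc "$cluster_name" -o 'jsonpath={.spec.proxysql.enabled}') == "true" ]]; then
		echo proxysql
	fi
}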
----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.q3Y8sRqqtM ++ mktemp + local LAST_ERR=/tmp/tmp.65NBlu4SWs + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.q3Y8sRqqtM secret/my-cluster-secrets-2 patched + cat /tmp/tmp.65NBlu4SWs + rm /tmp/tmp.q3Y8sRqqtM /tmp/tmp.65NBlu4SWs + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aY2I91K6zd +++ mktemp ++ local LAST_ERR=/tmp/tmp.hZmz6ACtmB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aY2I91K6zd ++ cat /tmp/tmp.hZmz6ACtmB ++ rm /tmp/tmp.aY2I91K6zd /tmp/tmp.hZmz6ACtmB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gNSV48mx3y +++ mktemp ++ local LAST_ERR=/tmp/tmp.Izg8424oGW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gNSV48mx3y ++ cat /tmp/tmp.Izg8424oGW ++ rm /tmp/tmp.gNSV48mx3y /tmp/tmp.Izg8424oGW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uKMReiKiYV +++ mktemp ++ local LAST_ERR=/tmp/tmp.fFuDMLXJr8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uKMReiKiYV ++ cat /tmp/tmp.fFuDMLXJr8 ++ rm /tmp/tmp.uKMReiKiYV /tmp/tmp.fFuDMLXJr8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
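patch_secret (above) rotates a single key in a Secret: the new value is base64-encoded and applied with a merge patch that touches only that key. The same steps, exactly as traced:

newpass=test-password2
enc=$(echo -n "$newpass" | base64)       # echo -n: no trailing newline in the encoded value
kubectl patch secret my-cluster-secrets-2 -p="{\"data\":{\"operator\": \"$enc\"}}"

The operator reconciles the changed key into the database users, and the cluster cycles through initializing again, hence the wait that follows.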
.+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4ZRVXFZ7ti +++ mktemp ++ local LAST_ERR=/tmp/tmp.uVhJFcfEIi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4ZRVXFZ7ti ++ cat /tmp/tmp.uVhJFcfEIi ++ rm /tmp/tmp.4ZRVXFZ7ti /tmp/tmp.uVhJFcfEIi ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NNiuHMrGeg +++ mktemp ++ local LAST_ERR=/tmp/tmp.TpOoAMotrB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NNiuHMrGeg ++ cat /tmp/tmp.TpOoAMotrB ++ rm /tmp/tmp.NNiuHMrGeg /tmp/tmp.TpOoAMotrB ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.phMAuh3SZe ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.MUULggLlGq +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.phMAuh3SZe +++++ cat /tmp/tmp.MUULggLlGq +++++ rm /tmp/tmp.phMAuh3SZe /tmp/tmp.MUULggLlGq +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.hMPRcTG1zp ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.JImuWiPepu +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.hMPRcTG1zp +++++ cat /tmp/tmp.JImuWiPepu +++++ rm /tmp/tmp.hMPRcTG1zp /tmp/tmp.JImuWiPepu +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1gYg8pCGrT +++ mktemp ++ local LAST_ERR=/tmp/tmp.1bEmtc9oux ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1gYg8pCGrT ++ cat /tmp/tmp.1bEmtc9oux ++ rm /tmp/tmp.1gYg8pCGrT /tmp/tmp.1bEmtc9oux ++ return 0 + [[ 2 == \2 ]] + echo + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW 
TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0Heofz4L1y +++ mktemp ++ local LAST_ERR=/tmp/tmp.k6GpxnsudT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0Heofz4L1y ++ cat /tmp/tmp.k6GpxnsudT ++ rm /tmp/tmp.0Heofz4L1y /tmp/tmp.k6GpxnsudT ++ return 0 + client_pod=pxc-client-857d976497-jk7xs + wait_pod pxc-client-857d976497-jk7xs + local pod=pxc-client-857d976497-jk7xs + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-jk7xs ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-jk7xs condition met waiting for pod/pxc-client-857d976497-jk7xs to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.vQXhoSoFAX/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4.sql /tmp/tmp.vQXhoSoFAX/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.Xr2CR61fzo +++ mktemp ++ local LAST_ERR=/tmp/tmp.ULeK15B8wy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Xr2CR61fzo ++ cat /tmp/tmp.ULeK15B8wy ++ rm /tmp/tmp.Xr2CR61fzo /tmp/tmp.ULeK15B8wy ++ return 0 + newpass=',rqet}=9y..VXNLO7' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\'',rqet}=9y..VXNLO7'\'';' '-h some-name-pxc -uroot -p'\'',rqet}=9y..VXNLO7'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\'',rqet}=9y..VXNLO7'\'';' + local 'uri=-h some-name-pxc -uroot -p'\'',rqet}=9y..VXNLO7'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mAhsxsEDI0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.3DoCYx5sWa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mAhsxsEDI0 ++ cat /tmp/tmp.3DoCYx5sWa ++ rm /tmp/tmp.mAhsxsEDI0 /tmp/tmp.3DoCYx5sWa ++ return 0 + client_pod=pxc-client-857d976497-jk7xs + wait_pod pxc-client-857d976497-jk7xs + local pod=pxc-client-857d976497-jk7xs + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-jk7xs ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-jk7xs condition met waiting for pod/pxc-client-857d976497-jk7xs to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup 
.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\'',rqet}=9y..VXNLO7'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\'',rqet}=9y..VXNLO7'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\'',rqet}=9y..VXNLO7'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\'',rqet}=9y..VXNLO7'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AK23uF2NmK +++ mktemp ++ local LAST_ERR=/tmp/tmp.3TIKSQpKM5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AK23uF2NmK ++ cat /tmp/tmp.3TIKSQpKM5 ++ rm /tmp/tmp.AK23uF2NmK /tmp/tmp.3TIKSQpKM5 ++ return 0 + client_pod=pxc-client-857d976497-jk7xs + wait_pod pxc-client-857d976497-jk7xs + local pod=pxc-client-857d976497-jk7xs + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-jk7xs ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-jk7xs condition met waiting for pod/pxc-client-857d976497-jk7xs to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.vQXhoSoFAX/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4.sql /tmp/tmp.vQXhoSoFAX/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.FyulNgSF9r +++ mktemp ++ local LAST_ERR=/tmp/tmp.6KxeFpUzag ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FyulNgSF9r ++ cat /tmp/tmp.6KxeFpUzag ++ rm /tmp/tmp.FyulNgSF9r /tmp/tmp.6KxeFpUzag ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.Pyk95ePOdz ++ mktemp + local LAST_ERR=/tmp/tmp.vivlLZiUz7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Pyk95ePOdz secret/my-cluster-secrets-2 configured + cat /tmp/tmp.vivlLZiUz7 Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
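getSecretData, used above for both my-cluster-secrets-2/root and internal-some-name/operator, is a Go-template read plus base64 decode. Note the decoded root password (,rqet}=9y..VXNLO7) contains shell metacharacters, which is why every URI in this log wraps it in single quotes. As traced:

getSecretData() {
	local secretName=$1
	local dataKey=$2
	kubectl_bin get "secrets/$secretName" "--template={{.data.$dataKey}}" | base64 --decode
}

The kubectl apply warning at the end of this step is expected: my-cluster-secrets-2 had only ever been created and patched imperatively, so the first apply adds the missing kubectl.kubernetes.io/last-applied-configuration annotation itself.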
+ rm /tmp/tmp.Pyk95ePOdz /tmp/tmp.vivlLZiUz7 + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.klppYNenPr +++ mktemp ++ local LAST_ERR=/tmp/tmp.MTQupp2djv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.klppYNenPr ++ cat /tmp/tmp.MTQupp2djv ++ rm /tmp/tmp.klppYNenPr /tmp/tmp.MTQupp2djv ++ return 0 + client_pod=pxc-client-857d976497-jk7xs + wait_pod pxc-client-857d976497-jk7xs + local pod=pxc-client-857d976497-jk7xs + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-jk7xs ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-jk7xs condition met waiting for pod/pxc-client-857d976497-jk7xs to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.vQXhoSoFAX/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4.sql /tmp/tmp.vQXhoSoFAX/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_OUT=/tmp/tmp.ybVFVfXYNV + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2199-baa7db2e#' ++ mktemp + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-20579~ + local LAST_ERR=/tmp/tmp.gvAFP2lTXN + local exit_status=0 + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ybVFVfXYNV perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.gvAFP2lTXN + rm /tmp/tmp.ybVFVfXYNV /tmp/tmp.gvAFP2lTXN + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.60to41CZKE +++ mktemp ++ local LAST_ERR=/tmp/tmp.fu8oK8e7u6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.60to41CZKE ++ cat /tmp/tmp.fu8oK8e7u6 ++ rm /tmp/tmp.60to41CZKE /tmp/tmp.fu8oK8e7u6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
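apply_config, traced above, pipes the CR manifest through a chain of sed substitutions that pin every image to the build under test before applying it. A condensed sketch; the IMAGE_* variables stand in for the job-specific values visible in the sed expressions above:

cat_config() {
	cat "$1" \
		| /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
		| /usr/bin/sed -e "s#image:.*\/percona-xtradb-cluster:.*\$#image: $IMAGE_PXC#" \
		| /usr/bin/sed -e "s#image:.*-init\$#image: $IMAGE_INIT#" \
		| /usr/bin/sed -e "s#image:.*-proxysql\$#image: $IMAGE_PROXY#" \
		| /usr/bin/sed -e "s#image:.*-haproxy\$#image: $IMAGE_HAPROXY#" \
		| /usr/bin/sed -e "s#image:.*-backup\$#image: $IMAGE_BACKUP#"
}

apply_config() {
	cat_config "$1" | kubectl_bin apply -f -
}

The manifest applied here (users/conf/some-name.yml) also switches the proxy layer: the consistency check after it expects three proxy pods (wait_cluster_consistency some-name 3 3), and the probe further down indeed reports haproxy.enabled=true.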
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Lw7PZ3l2w6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.m4e3zHrlq4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Lw7PZ3l2w6 ++ cat /tmp/tmp.m4e3zHrlq4 ++ rm /tmp/tmp.Lw7PZ3l2w6 /tmp/tmp.m4e3zHrlq4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n .
[ .. iterations 1-22 of this 5-second poll elided: identical apart from the loop counter and the mktemp file names; the state stays initializing throughout .. ]
+ echo -n .
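For reference while the loop above runs: every compare_mysql_cmd step in this log (select-4) picks its golden file by PXC image version before diffing, which is what the repeated [[ ... =~ 8\.4 ]] / 8\.0 / 5\.7 checks are. A sketch of that selection; $IMAGE_PXC and $tmp_dir are stand-in names:

expected=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-4.sql
if [[ $IMAGE_PXC =~ 5\.7 ]] && [[ -f ${expected%.sql}-57.sql ]]; then
	expected=${expected%.sql}-57.sql     # use the 5.7-specific golden file when one exists
fi
[ -s "$tmp_dir/select-4.sql" ]           # captured query output must be non-empty
diff -u "$expected" "$tmp_dir/select-4.sql"

In this run no select-4-57.sql variant exists, so the generic file is used and every diff comes back empty.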
.+ sleep 5 + [[ 23 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hf22fnDdjN +++ mktemp ++ local LAST_ERR=/tmp/tmp.wt4b1F40nU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hf22fnDdjN ++ cat /tmp/tmp.wt4b1F40nU ++ rm /tmp/tmp.hf22fnDdjN /tmp/tmp.wt4b1F40nU ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J6yh2DwCI0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.XgBUh4OM62 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.J6yh2DwCI0 ++ cat /tmp/tmp.XgBUh4OM62 ++ rm /tmp/tmp.J6yh2DwCI0 /tmp/tmp.XgBUh4OM62 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.0lhZvH8vGc ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ZUAVqV7LC7 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.0lhZvH8vGc +++++ cat /tmp/tmp.ZUAVqV7LC7 +++++ rm /tmp/tmp.0lhZvH8vGc /tmp/tmp.ZUAVqV7LC7 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.t2pW7RiHcy +++ mktemp ++ local LAST_ERR=/tmp/tmp.sug69fo7eg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.t2pW7RiHcy ++ cat /tmp/tmp.sug69fo7eg ++ rm /tmp/tmp.t2pW7RiHcy /tmp/tmp.sug69fo7eg ++ return 0 + [[ 3 == \3 ]] + echo + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + check_generation 2 haproxy some-name + local generation=2 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.n7yy5waACu +++ mktemp ++ local LAST_ERR=/tmp/tmp.jab2p5tcB2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.n7yy5waACu ++ cat /tmp/tmp.jab2p5tcB2 ++ rm /tmp/tmp.n7yy5waACu /tmp/tmp.jab2p5tcB2 ++ return 0 + current_generation=2 + [[ 2 != \2 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.LQlRRYG8Bm ++ mktemp + local LAST_ERR=/tmp/tmp.VXwxmjNzNL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 
+ set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LQlRRYG8Bm secret/my-cluster-secrets patched + cat /tmp/tmp.VXwxmjNzNL + rm /tmp/tmp.LQlRRYG8Bm /tmp/tmp.VXwxmjNzNL + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wMmJu2ILc6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.a407VMCUuH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wMmJu2ILc6 ++ cat /tmp/tmp.a407VMCUuH ++ rm /tmp/tmp.wMmJu2ILc6 /tmp/tmp.a407VMCUuH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EVIMPY3hht +++ mktemp ++ local LAST_ERR=/tmp/tmp.mu6v4QRBBG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EVIMPY3hht ++ cat /tmp/tmp.mu6v4QRBBG ++ rm /tmp/tmp.EVIMPY3hht /tmp/tmp.mu6v4QRBBG ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Sp4rwlIqlG +++ mktemp ++ local LAST_ERR=/tmp/tmp.6fZWgBHysS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Sp4rwlIqlG ++ cat /tmp/tmp.6fZWgBHysS ++ rm /tmp/tmp.Sp4rwlIqlG /tmp/tmp.6fZWgBHysS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BBoWIqDrFu +++ mktemp ++ local LAST_ERR=/tmp/tmp.Xaz7n1Fztk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BBoWIqDrFu ++ cat /tmp/tmp.Xaz7n1Fztk ++ rm /tmp/tmp.BBoWIqDrFu /tmp/tmp.Xaz7n1Fztk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oq0MkXW0LS +++ mktemp ++ local LAST_ERR=/tmp/tmp.rRcckPKCKH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oq0MkXW0LS ++ cat /tmp/tmp.rRcckPKCKH ++ rm /tmp/tmp.oq0MkXW0LS /tmp/tmp.rRcckPKCKH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
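check_generation, run just before the monitor patch above, asserts that the config change rolled the HAProxy StatefulSet exactly once: .metadata.generation increments on every spec update, so it should now read 2. Sketch from the trace; the failure message is an assumption (only the passing branch, [[ 2 != \2 ]], appears in this log):

check_generation() {
	local generation=$1
	local container=$2
	local cluster=$3
	local current_generation
	current_generation=$(kubectl_bin get statefulset "$cluster-$container" \
		-o 'jsonpath={.metadata.generation}')
	if [[ $generation != "$current_generation" ]]; then
		echo "unexpected generation for statefulset/$cluster-$container: got $current_generation, want $generation"
		exit 1                            # assumption: the log only shows the passing branch
	fi
}

The monitor-key rotation that follows (patch_secret my-cluster-secrets monitor ...) uses the same base64 merge-patch pattern shown earlier, and the cluster again drops to initializing while the operator propagates it.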
.+ sleep 5
[.. the identical poll block repeats for iterations 4 through 33, one pass every 5 seconds: kubectl_bin re-runs kubectl get pxc some-name -o 'jsonpath={.status.state}' with fresh mktemp capture files each time, and the reported state stays "initializing" throughout ..]
+ [[ initializing == \r\e\a\d\y ]] + echo -n .
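On the final pass (below) the state flips to "ready", and the harness then also asserts the PXC and HAProxy replica counts from the same status object. All three checks can be read in one jsonpath expression; a sketch using the field paths that appear in the trace:

    kubectl get pxc some-name \
        -o jsonpath='{.status.state} {.status.pxc.ready} {.status.haproxy.ready}'
    # expected for this test: ready 3 3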
.+ sleep 5 + [[ 34 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.R83CqZ92A7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.AgT1wOyhkj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.R83CqZ92A7 ++ cat /tmp/tmp.AgT1wOyhkj ++ rm /tmp/tmp.R83CqZ92A7 /tmp/tmp.AgT1wOyhkj ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J6crVmtd6M +++ mktemp ++ local LAST_ERR=/tmp/tmp.6tSf3siuzR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.J6crVmtd6M ++ cat /tmp/tmp.6tSf3siuzR ++ rm /tmp/tmp.J6crVmtd6M /tmp/tmp.6tSf3siuzR ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.4wxlsUfaSo ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.U34lWIBNoK +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.4wxlsUfaSo +++++ cat /tmp/tmp.U34lWIBNoK +++++ rm /tmp/tmp.4wxlsUfaSo /tmp/tmp.U34lWIBNoK +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qFFxvUW7jl +++ mktemp ++ local LAST_ERR=/tmp/tmp.sYdh09P9AO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qFFxvUW7jl ++ cat /tmp/tmp.sYdh09P9AO ++ rm /tmp/tmp.qFFxvUW7jl /tmp/tmp.sYdh09P9AO ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-3-57.sql ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qAgqYDRhzN +++ mktemp ++ local LAST_ERR=/tmp/tmp.UC0Z7VujJD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 
'!=' 0 ']' ++ break ++ cat /tmp/tmp.qAgqYDRhzN ++ cat /tmp/tmp.UC0Z7VujJD ++ rm /tmp/tmp.qAgqYDRhzN /tmp/tmp.UC0Z7VujJD ++ return 0 + client_pod=pxc-client-857d976497-jk7xs + wait_pod pxc-client-857d976497-jk7xs + local pod=pxc-client-857d976497-jk7xs + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-jk7xs ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-jk7xs condition met waiting for pod/pxc-client-857d976497-jk7xs to become Ready
Defaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.vQXhoSoFAX/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2199/e2e-tests/users/compare/select-3.sql /tmp/tmp.vQXhoSoFAX/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + check_generation 3 haproxy some-name + local generation=3 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.N4GgaatpXE +++ mktemp ++ local LAST_ERR=/tmp/tmp.GVbalG4xqN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.N4GgaatpXE ++ cat /tmp/tmp.GVbalG4xqN ++ rm /tmp/tmp.N4GgaatpXE /tmp/tmp.GVbalG4xqN ++ return 0 + current_generation=3 + [[ 3 != \3 ]] + destroy users-20579 + local namespace=users-20579 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' + grep -v 'get backup status: Job.batch' ++ get_operator_pod + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + tee /tmp/tmp.vQXhoSoFAX/operator.log ++ local label_prefix=app.kubernetes.io/ + sort -u +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.PbJ1MaLaTw +++ mktemp ++ local LAST_ERR=/tmp/tmp.hJnRXp5xBS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PbJ1MaLaTw ++ cat /tmp/tmp.hJnRXp5xBS ++ rm /tmp/tmp.PbJ1MaLaTw /tmp/tmp.hJnRXp5xBS ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-56f95ddfc4-lf25l ++ mktemp + local LAST_OUT=/tmp/tmp.AhtwCBcUaT ++ mktemp + local LAST_ERR=/tmp/tmp.FxE9PN64M2 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-56f95ddfc4-lf25l + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.AhtwCBcUaT + cat /tmp/tmp.FxE9PN64M2 + rm /tmp/tmp.AhtwCBcUaT /tmp/tmp.FxE9PN64M2 + return 0
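The check_generation step above reads the StatefulSet's metadata.generation and compares it with the expected value (3 here, i.e. two spec updates after the initial rollout). A standalone sketch of the same assertion, with the namespace made explicit (the harness relies on the kubectl context instead):

    expected=3
    gen=$(kubectl get statefulset some-name-haproxy -n users-20579 \
        -o jsonpath='{.metadata.generation}')
    if [[ "$gen" != "$expected" ]]; then
        echo "unexpected haproxy statefulset generation: want $expected, got $gen" >&2
        exit 1
    fi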
2025-10-17T13:29:44.753Z INFO setup Manager starting up {"gitCommit": "baa7db2e6f9ee018ee01995818794ca084b10adf", "gitBranch": "PR-2199-baa7db2e", "buildTime": "2025-10-17T11:23:00Z", "goVersion": "go1.25.3", "os": "linux", "arch": "amd64"}
2025-10-17T13:29:44.753Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.13-gke.1023000"}
2025-10-17T13:29:44.756Z INFO setup Registering Components.
2025-10-17T13:29:45.188Z INFO controller-runtime.metrics Starting metrics server
2025-10-17T13:29:45.188Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"}
2025-10-17T13:29:45.188Z INFO setup Starting the Cmd.
2025-10-17T13:29:45.192Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false}
2025-10-17T13:29:45.193Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"}
2025-10-17T13:29:45.193Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"}
2025-10-17T13:29:45.193Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443}
2025-10-17T13:29:45.193Z INFO controller-runtime.webhook Starting webhook server
2025-10-17T13:29:45.193Z INFO starting server {"name": "health probe", "addr": "[::]:8081"}
2025-10-17T13:29:45.294Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com...
2025-10-17T13:29:45.330Z DEBUG events percona-xtradb-cluster-operator-56f95ddfc4-lf25l_9ec6cecb-b32f-4338-b2e6-b7e66fdf934b became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"254d7f67-2c77-46e3-bcf6-a612248cd2a2","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1760707785321871009"}, "reason": "LeaderElection"}
2025-10-17T13:29:45.330Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"}
2025-10-17T13:29:45.330Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.Secret"}
2025-10-17T13:29:45.330Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"}
2025-10-17T13:29:45.330Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com
2025-10-17T13:29:45.331Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"}
2025-10-17T13:29:45.431Z INFO Starting Controller {"controller": "pxcbackup-controller"}
2025-10-17T13:29:45.431Z INFO Starting Controller {"controller": "pxc-controller"}
2025-10-17T13:29:45.431Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1}
2025-10-17T13:29:45.431Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1}
2025-10-17T13:29:45.532Z INFO Starting Controller {"controller": "pxcrestore-controller"}
2025-10-17T13:29:45.532Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1}
2025-10-17T13:30:28.344Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "9a99ec96-9a84-4e21-a419-d51aac25c377", "version": "1.19.0"}
2025-10-17T13:30:28.592Z INFO User secrets updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID":
"9a99ec96-9a84-4e21-a419-d51aac25c377", "secrets": "my-cluster-secrets"} 2025-10-17T13:30:28.811Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "9a99ec96-9a84-4e21-a419-d51aac25c377", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-10-17T13:30:28.836Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "9a99ec96-9a84-4e21-a419-d51aac25c377", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-10-17T13:30:29.398Z ERROR Reconciler error {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "9a99ec96-9a84-4e21-a419-d51aac25c377", "error": "failed to deploy proxysql: updatePod for proxysql: reconcile config: reconcile autotune config: create or update configmap: configmaps \"auto-some-name-pxc\" already exists", "errorVerbose": "configmaps \"auto-some-name-pxc\" already exists\ncreate or update configmap\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileAutotuneConfigMap\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:92\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:25\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nreconcile autotune 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:27\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nreconcile 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:54\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nupdatePod for proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:578\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nfailed to deploy 
proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:596\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-10-17T13:30:29.512Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "fba75372-fedc-4d5e-8979-4f6a0a5db404", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-17T13:30:29.565Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "fba75372-fedc-4d5e-8979-4f6a0a5db404", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-17T13:30:29.754Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "fba75372-fedc-4d5e-8979-4f6a0a5db404", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-10-17T13:30:29.835Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "fba75372-fedc-4d5e-8979-4f6a0a5db404", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-17T13:30:29.956Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "fba75372-fedc-4d5e-8979-4f6a0a5db404", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-17T13:30:30.104Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "fba75372-fedc-4d5e-8979-4f6a0a5db404", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-17T13:30:30.260Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "fba75372-fedc-4d5e-8979-4f6a0a5db404", "object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-17T13:30:31.001Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "a76848bf-0bc8-4c23-9844-3ade89f587a5", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-10-17T13:31:47.681Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-20579", "name": 
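The Reconciler error above is a create/create collision: the same reconcile logged two "Creating object" attempts for the auto-some-name-pxc ConfigMap and the second create returned "already exists"; the follow-up reconcile (fba75372-...) proceeds normally. If the error were persistent, the object and its owner could be inspected directly; a sketch (plain kubectl, nothing operator-specific):

    # show which object owns the autotune ConfigMap, e.g. PerconaXtraDBCluster/some-name
    kubectl get configmap auto-some-name-pxc -n users-20579 \
        -o jsonpath='{.metadata.ownerReferences[0].kind}/{.metadata.ownerReferences[0].name}{"\n"}'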
"some-name", "reconcileID": "e56c982c-dcdd-47b3-b2f6-d9f44a44644d", "user": "operator"} 2025-10-17T13:31:47.716Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "e56c982c-dcdd-47b3-b2f6-d9f44a44644d", "user": "monitor"} 2025-10-17T13:31:47.755Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "e56c982c-dcdd-47b3-b2f6-d9f44a44644d"} 2025-10-17T13:31:47.790Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "e56c982c-dcdd-47b3-b2f6-d9f44a44644d", "user": "xtrabackup"} 2025-10-17T13:31:47.824Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "e56c982c-dcdd-47b3-b2f6-d9f44a44644d"} 2025-10-17T13:31:47.833Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "e56c982c-dcdd-47b3-b2f6-d9f44a44644d", "err": "get primary pxc pod: not found"} 2025-10-17T13:31:52.630Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "9e7080a0-991c-4f2c-a5d1-b255baf14447", "err": "get primary pxc pod: not found"} 2025-10-17T13:31:57.765Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "ab3afa48-e71d-4eb5-8e04-e42f64f2c6e8", "err": "get primary pxc pod: not found"} 2025-10-17T13:34:14.026Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "1ba0898d-b78b-4d72-ac4b-d6e1226b3c3c", "user": "root"} 2025-10-17T13:34:14.069Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "1ba0898d-b78b-4d72-ac4b-d6e1226b3c3c", "user": "replication"} 2025-10-17T13:34:14.141Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "1ba0898d-b78b-4d72-ac4b-d6e1226b3c3c", "new version": "5.7.44-48-57"} 2025-10-17T13:34:15.928Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "1ba0898d-b78b-4d72-ac4b-d6e1226b3c3c"} 2025-10-17T13:34:20.726Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "33c2f81f-4cf9-4749-856f-d6924aed7182"} 2025-10-17T13:34:26.026Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "1b8230b4-d082-4c72-8a99-acb2df025653"} 2025-10-17T13:34:31.514Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "89dac5a1-c1b6-4c1a-9ff8-bd8c5904bd6e"} 2025-10-17T13:34:37.548Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "87297304-9ac6-4603-83f1-50ba5271a041"} 2025-10-17T13:34:42.631Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "c9ea68ef-8298-4124-8ad7-d62d7482aa40"} 2025-10-17T13:34:47.951Z DEBUG PXC 
users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "be4b7b07-b087-4f47-bc93-3040a88fc12f"} 2025-10-17T13:34:53.110Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "780f9988-3c08-446c-a489-b53644450155"} 2025-10-17T13:34:58.717Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "0b05b09e-7d80-4ef3-962c-399e6fac5f35"} 2025-10-17T13:35:04.004Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "19663cfe-ba53-46ca-b1cf-06168b66f3dc"} 2025-10-17T13:35:09.320Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "be4f3e13-8bdd-4492-9d6e-dd893b61da8e"} 2025-10-17T13:35:14.645Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "de8292b7-0d0f-4689-9f78-9baa8e75c1d1"} 2025-10-17T13:35:19.807Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "03bf37f7-e27e-4167-911c-88b950e63dd3"} 2025-10-17T13:35:24.847Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "7d5c6b21-a10d-498b-94cf-145b3acb1a20"} 2025-10-17T13:35:29.905Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "8eb2974a-86d1-443d-be02-0f520abb159a"} 2025-10-17T13:35:35.648Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d6e262e1-7226-402c-a505-cfcf6707bbcd"} 2025-10-17T13:35:40.842Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "551e6935-0463-4513-8a21-cf726d3aa11a"} 2025-10-17T13:35:46.129Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d8f28daa-4f31-4aaa-8ae6-6abb2d90f96f"} 2025-10-17T13:35:51.446Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "e15dc0a5-faf9-4220-a26e-ad43fba38a64"} 2025-10-17T13:35:54.400Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "03dbe5dc-ace1-4291-a22d-ad92da302b6b", "user": "root"} 2025-10-17T13:35:54.413Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "03dbe5dc-ace1-4291-a22d-ad92da302b6b", "user": "root"} 2025-10-17T13:35:54.437Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "03dbe5dc-ace1-4291-a22d-ad92da302b6b", "secret": "some-name-mysql-init", "user": "root"} 2025-10-17T13:35:56.926Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "03dbe5dc-ace1-4291-a22d-ad92da302b6b"} 2025-10-17T13:35:56.954Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", 
"reconcileID": "03dbe5dc-ace1-4291-a22d-ad92da302b6b", "user": "root"} 2025-10-17T13:35:58.543Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "03dbe5dc-ace1-4291-a22d-ad92da302b6b"} 2025-10-17T13:36:04.023Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "b4b03a22-7977-499d-bc7e-7560cfeaa83f"} 2025-10-17T13:36:09.146Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "ff3eb7f1-b099-48a8-bfd4-6fb9290870f1"} 2025-10-17T13:36:13.556Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "77fce8f2-0734-4a6b-9376-8841373625a2", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-17T13:36:13.612Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "77fce8f2-0734-4a6b-9376-8841373625a2", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-17T13:36:15.026Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "fbbc33ec-2753-496a-9ba6-e4b0848c01cb"} 2025-10-17T13:36:44.206Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "a8439dfa-a688-4484-8632-7c1468c7e379", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-10-17T13:36:44.536Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": 
"3076c0b1-775f-4b0d-a186-ebc8ae37efe3", "user": "proxyadmin"} 2025-10-17T13:36:44.536Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "3076c0b1-775f-4b0d-a186-ebc8ae37efe3", "user": "proxyadmin"} 2025-10-17T13:36:44.567Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "3076c0b1-775f-4b0d-a186-ebc8ae37efe3", "user": "proxyadmin"} 2025-10-17T13:36:44.591Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "3076c0b1-775f-4b0d-a186-ebc8ae37efe3", "user": "proxyadmin"} 2025-10-17T13:36:44.591Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "3076c0b1-775f-4b0d-a186-ebc8ae37efe3", "last-applied-secret": "9bedd67668a9ce0b3ee4b0bde86b99e8ab6068a54eb6553368f477e955cc6565"} 2025-10-17T13:36:44.594Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "3076c0b1-775f-4b0d-a186-ebc8ae37efe3", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-17T13:36:46.297Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "3076c0b1-775f-4b0d-a186-ebc8ae37efe3", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-10-17T13:37:23.460Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "ca46e130-d635-4754-bbf2-440ad8296876"} 2025-10-17T13:37:27.466Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "6f832bb6-a29a-4ed1-824e-d34d435a88f0"} 2025-10-17T13:37:30.636Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "66988b02-2267-4974-9f5c-de68c6067ef1", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-17T13:37:30.699Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "66988b02-2267-4974-9f5c-de68c6067ef1", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-17T13:37:32.486Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "846fc143-358e-4d6a-9afa-c796d6a40d42", "user": "xtrabackup"} 2025-10-17T13:37:32.501Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "846fc143-358e-4d6a-9afa-c796d6a40d42", "user": "xtrabackup"} 2025-10-17T13:37:32.524Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "846fc143-358e-4d6a-9afa-c796d6a40d42", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-10-17T13:37:32.548Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "846fc143-358e-4d6a-9afa-c796d6a40d42", "user": "xtrabackup"} 2025-10-17T13:37:32.548Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", 
"reconcileID": "846fc143-358e-4d6a-9afa-c796d6a40d42", "last-applied-secret": "c987768299d7cd1d20e1f0977d005ed6482ed9aaabcd5436e34f5fc863ef777b"} 2025-10-17T13:37:32.552Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "846fc143-358e-4d6a-9afa-c796d6a40d42", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-17T13:37:32.601Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "66988b02-2267-4974-9f5c-de68c6067ef1"} 2025-10-17T13:39:26.400Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "ac07572b-3e78-43a4-9c3b-b798a1fdec26", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.232.168.53:33062: connect: connection refused"} 2025-10-17T13:39:31.559Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "3713f208-3fa8-4468-ba86-1985f21c0856", "primary name": "some-name-pxc-0.some-name-pxc.users-20579.svc.cluster.local"} 2025-10-17T13:39:36.762Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "6122e261-a72c-4b1d-a37b-abda5ffa009d", "primary name": "some-name-pxc-0.some-name-pxc.users-20579.svc.cluster.local"} 2025-10-17T13:39:41.964Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "91f00cf4-0d00-487f-9803-3b664761fb3f", "primary name": "some-name-pxc-0.some-name-pxc.users-20579.svc.cluster.local"} 2025-10-17T13:39:47.117Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "2a15e1f3-3659-45bb-93ad-d50b273cad54", "primary name": "some-name-pxc-0.some-name-pxc.users-20579.svc.cluster.local"} 2025-10-17T13:39:52.275Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "0b2987dd-9bfa-49bb-b540-8437d685bf2b", "primary name": "some-name-pxc-0.some-name-pxc.users-20579.svc.cluster.local"} 2025-10-17T13:39:57.436Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "c19009fc-9bb1-49a9-bac3-1ae34cb8c3bf", "primary name": "some-name-pxc-0.some-name-pxc.users-20579.svc.cluster.local"} 2025-10-17T13:40:02.573Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "05497221-69a5-406c-aaee-c57b307f91a2", "primary name": "some-name-pxc-0.some-name-pxc.users-20579.svc.cluster.local"} 2025-10-17T13:40:10.425Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "b281de47-3ae0-4d9f-bb24-fe738db6d866"} 2025-10-17T13:40:15.307Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d7d924b3-d09e-469a-b26d-75c490f8fa9e"} 2025-10-17T13:40:16.770Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "cfa7d97b-bfcd-494c-9fde-8cdfa4a39a74", "user": "monitor"} 2025-10-17T13:40:16.780Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "cfa7d97b-bfcd-494c-9fde-8cdfa4a39a74", "user": "monitor"} 2025-10-17T13:40:16.801Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "cfa7d97b-bfcd-494c-9fde-8cdfa4a39a74", "secret": "some-name-mysql-init", "user": "monitor"} 2025-10-17T13:40:16.820Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "cfa7d97b-bfcd-494c-9fde-8cdfa4a39a74", "user": "monitor"} 2025-10-17T13:40:16.845Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "cfa7d97b-bfcd-494c-9fde-8cdfa4a39a74", "user": "monitor"} 2025-10-17T13:40:16.845Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "cfa7d97b-bfcd-494c-9fde-8cdfa4a39a74", "last-applied-secret": "66239470076d8b803c66c6ed69c31e024e0424a0a108c8047ef01b21abd3f519"} 2025-10-17T13:40:16.849Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "cfa7d97b-bfcd-494c-9fde-8cdfa4a39a74", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-17T13:40:19.183Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "cfa7d97b-bfcd-494c-9fde-8cdfa4a39a74", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-10-17T13:41:10.747Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "cfa0caa3-3242-46dd-b19c-c146d0ea7704"} 2025-10-17T13:41:15.843Z DEBUG PXC users 
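Each credential rotation in this run follows the same arc: the operator updates the user and the mysql-init/internal secrets, stamps a new last-applied-secret hash on the StatefulSet, and the affected pods restart; until the rollout finishes, syncusers and replication probes fail transiently (the access-denied and "not found" bursts above and below). Two quick ways to watch a rotation window close from outside, assuming the names and namespace from this trace:

    # wait for the proxy StatefulSet to finish rolling after a password change
    kubectl rollout status statefulset/some-name-proxysql -n users-20579

    # read back the credential the operator is now applying (monitor user shown)
    kubectl get secret my-cluster-secrets -n users-20579 \
        -o jsonpath='{.data.monitor}' | base64 -d; echo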
2025-10-17T13:41:15.843Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "937ab935-207c-4fdf-9483-0ec262d55032"}
2025-10-17T13:41:21.744Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "ca43e0c3-8256-4cfe-b959-ae3a09c97517"}
2025-10-17T13:41:26.073Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "5aea3b88-bdef-4b0d-a7a4-724cd9f2c7e2"}
2025-10-17T13:41:29.949Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "6fab1580-7e11-431f-89f2-0026238e3ce3", "user": "operator"}
2025-10-17T13:41:29.960Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "6fab1580-7e11-431f-89f2-0026238e3ce3", "user": "operator"}
2025-10-17T13:41:29.981Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "6fab1580-7e11-431f-89f2-0026238e3ce3", "secret": "some-name-mysql-init", "user": "operator"}
2025-10-17T13:41:30.005Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "6fab1580-7e11-431f-89f2-0026238e3ce3", "user": "operator"}
2025-10-17T13:41:30.005Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "6fab1580-7e11-431f-89f2-0026238e3ce3", "last-applied-secret": "0ec1210a11c59b9e0dbad37875c36033bb5b49f8d6019ab13f031eb19bf6bcab"}
2025-10-17T13:41:30.012Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "6fab1580-7e11-431f-89f2-0026238e3ce3", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-10-17T13:41:33.415Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "62c7c4d0-3404-4540-9571-122e71b8ef88", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.'
(using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' 
(using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-20579.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-10-17T13:42:24.574Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "7a35fbea-452c-4476-86db-f13e9e93ff06"}
2025-10-17T13:42:28.801Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "c37f9d8b-1891-4582-b212-f801dd80c7da"}
2025-10-17T13:42:34.401Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "a50e51c0-9abb-4ab1-9792-72e0ca8fdff0"}
2025-10-17T13:42:39.389Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "f33eff7d-2aa2-4dd1-8026-63d075f42f61"}
2025-10-17T13:42:40.513Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "90d74472-5f5c-422a-bf76-cbbc2fe2d3eb", "secrets": "my-cluster-secrets-2"}
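The Created user secrets entry marks the point where the test re-points the cluster at a brand-new Secret object; everything that follows is the operator rotating every system user to the passwords in it. A sketch of the equivalent manual step, assuming the CR and Secret names from this log (spec.secretsName is the PXC custom resource field that selects the user-secrets object):

# switch the cluster to a different user-secrets object; the operator reacts by rotating all system users
kubectl -n users-20579 patch pxc some-name --type=merge -p '{"spec":{"secretsName":"my-cluster-secrets-2"}}'
# read back one of the new passwords for later verification
kubectl -n users-20579 get secret my-cluster-secrets-2 -o jsonpath='{.data.root}' | base64 -d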
"reconcileID": "90d74472-5f5c-422a-bf76-cbbc2fe2d3eb", "secret": "some-name-mysql-init", "user": "operator"} 2025-10-17T13:42:43.192Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "90d74472-5f5c-422a-bf76-cbbc2fe2d3eb", "user": "operator"} 2025-10-17T13:42:43.192Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "90d74472-5f5c-422a-bf76-cbbc2fe2d3eb", "user": "monitor"} 2025-10-17T13:42:43.204Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "90d74472-5f5c-422a-bf76-cbbc2fe2d3eb", "user": "monitor"} 2025-10-17T13:42:43.226Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "90d74472-5f5c-422a-bf76-cbbc2fe2d3eb", "secret": "some-name-mysql-init", "user": "monitor"} 2025-10-17T13:42:43.244Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "90d74472-5f5c-422a-bf76-cbbc2fe2d3eb", "user": "monitor"} 2025-10-17T13:42:43.268Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "90d74472-5f5c-422a-bf76-cbbc2fe2d3eb", "user": "monitor"} 2025-10-17T13:42:43.268Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "90d74472-5f5c-422a-bf76-cbbc2fe2d3eb", "user": "xtrabackup"} 2025-10-17T13:42:43.280Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "90d74472-5f5c-422a-bf76-cbbc2fe2d3eb", "user": "xtrabackup"} 2025-10-17T13:42:43.301Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "90d74472-5f5c-422a-bf76-cbbc2fe2d3eb", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-10-17T13:42:43.320Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "90d74472-5f5c-422a-bf76-cbbc2fe2d3eb", "user": "xtrabackup"} 2025-10-17T13:42:43.320Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "90d74472-5f5c-422a-bf76-cbbc2fe2d3eb", "user": "replication"} 2025-10-17T13:42:43.335Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "90d74472-5f5c-422a-bf76-cbbc2fe2d3eb", "user": "replication"} 2025-10-17T13:42:43.360Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "90d74472-5f5c-422a-bf76-cbbc2fe2d3eb", "secret": "some-name-mysql-init", "user": "replication"} 2025-10-17T13:42:43.384Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "90d74472-5f5c-422a-bf76-cbbc2fe2d3eb", "user": "replication"} 2025-10-17T13:42:43.384Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "90d74472-5f5c-422a-bf76-cbbc2fe2d3eb", "user": "proxyadmin"} 2025-10-17T13:42:43.402Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", 
"reconcileID": "90d74472-5f5c-422a-bf76-cbbc2fe2d3eb", "user": "proxyadmin"} 2025-10-17T13:42:43.423Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "90d74472-5f5c-422a-bf76-cbbc2fe2d3eb", "user": "proxyadmin"} 2025-10-17T13:42:43.423Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "90d74472-5f5c-422a-bf76-cbbc2fe2d3eb", "last-applied-secret": "bbbaa9f95040662ba210d8e7c4e2ddf5c723d477a45fe5cdb37deeeefba32ac3"} 2025-10-17T13:42:43.423Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "90d74472-5f5c-422a-bf76-cbbc2fe2d3eb", "last-applied-secret": "bbbaa9f95040662ba210d8e7c4e2ddf5c723d477a45fe5cdb37deeeefba32ac3"} 2025-10-17T13:42:43.426Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "90d74472-5f5c-422a-bf76-cbbc2fe2d3eb", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-17T13:42:43.480Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "90d74472-5f5c-422a-bf76-cbbc2fe2d3eb", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-17T13:42:45.722Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "90d74472-5f5c-422a-bf76-cbbc2fe2d3eb", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-10-17T13:44:40.009Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "f740a7f3-b048-49d2-93ea-37d9e56ca97a", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-20579 on 34.118.224.10:53: no such host"} 2025-10-17T13:44:45.188Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "15a4f3e7-6d8b-45e3-b7d5-b66c616c4b5c", "primary name": "some-name-pxc-0.some-name-pxc.users-20579.svc.cluster.local"} 2025-10-17T13:44:50.418Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "4c6759c9-ed26-4363-8a0e-e656323a4699", "primary name": "some-name-pxc-0.some-name-pxc.users-20579.svc.cluster.local"} 2025-10-17T13:44:55.777Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "3dd2a884-e5bb-46f6-8da2-96a8c03f1ac7", "primary name": "some-name-pxc-0.some-name-pxc.users-20579.svc.cluster.local"} 2025-10-17T13:45:00.926Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "25a7f9f1-82bb-4e57-b726-cb3113ca4ea4", "primary name": "some-name-pxc-0.some-name-pxc.users-20579.svc.cluster.local"} 2025-10-17T13:45:06.108Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "c10ec914-dfc4-4ffa-8ffd-3d925c99d3d2", "primary name": "some-name-pxc-0.some-name-pxc.users-20579.svc.cluster.local"} 2025-10-17T13:45:11.284Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "619eae09-388c-44c1-8ddb-c15a507c1eb8", "primary name": "some-name-pxc-0.some-name-pxc.users-20579.svc.cluster.local"} 2025-10-17T13:45:18.867Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "0f3f0b61-2fbd-4307-9406-097918945893"} 2025-10-17T13:45:23.856Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "0dd486a5-7420-4f9b-8793-319d87fd0816", "user": "operator"} 2025-10-17T13:45:23.871Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "0dd486a5-7420-4f9b-8793-319d87fd0816", "user": "operator"} 2025-10-17T13:45:23.895Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "0dd486a5-7420-4f9b-8793-319d87fd0816", "secret": "some-name-mysql-init", "user": "operator"} 2025-10-17T13:45:23.916Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "0dd486a5-7420-4f9b-8793-319d87fd0816", "user": "operator"} 2025-10-17T13:45:23.916Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "0dd486a5-7420-4f9b-8793-319d87fd0816", "last-applied-secret": "1091dfe38c67a8fda024884eeae01e505751f06cb9f030a5484ad71b5ec4d9ee"} 2025-10-17T13:45:23.923Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "0dd486a5-7420-4f9b-8793-319d87fd0816", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-17T13:45:24.099Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "0dd368c0-444a-4257-a570-2a4b9a914216"} 2025-10-17T13:46:02.613Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "117ef6e8-2bb2-44f8-b650-0a0c3f5cf490"} 2025-10-17T13:46:07.662Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "e3611dc9-0e68-4be1-a7f6-5c9580310057"} 2025-10-17T13:46:13.110Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "cb74025c-a825-466f-b2fb-37a04a940e99"} 2025-10-17T13:46:18.332Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "94490f64-3c35-46aa-a194-a942277729dd"} 2025-10-17T13:46:23.818Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "dce6c667-6a0b-48bd-a2a8-353ced00a800"} 2025-10-17T13:46:28.950Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", 
"namespace": "users-20579", "name": "some-name", "reconcileID": "ee61a3a0-45ce-49e7-ac4a-542a78c75077"} 2025-10-17T13:46:34.033Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "4b17ea3f-505e-4edc-8f62-91743bbca0ec"} 2025-10-17T13:46:39.993Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "6aac62f8-9223-45ce-bc01-960bfd5d5d3e"} 2025-10-17T13:46:45.029Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "49913c57-f9d2-48e1-a7b2-3d82b1b6b0ae"} 2025-10-17T13:46:49.850Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "4b2fd5e7-cc6d-41d5-a2d9-bd40d5189e8f"} 2025-10-17T13:46:55.270Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "2aa917dc-51a1-41cb-9dd9-3f4f90862520"} 2025-10-17T13:47:00.438Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "91ede562-f3ad-45e7-a667-aab438331ef2"} 2025-10-17T13:47:06.719Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "580e5c69-ab69-47e1-8e66-fab46b4e9122"} 2025-10-17T13:47:11.246Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "8baf8a90-f7c0-4f18-af83-501010b30f80"} 2025-10-17T13:47:16.415Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "415df73b-8149-4970-b0e8-4a9f4ca505d4"} 2025-10-17T13:47:21.911Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "73b51774-37cd-47a3-840c-03a87a81dd19"} 2025-10-17T13:47:22.650Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "804d102b-9d0e-4dc4-98ed-246d3956604e", "user": "root"} 2025-10-17T13:47:22.667Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "804d102b-9d0e-4dc4-98ed-246d3956604e", "user": "root"} 2025-10-17T13:47:22.689Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "804d102b-9d0e-4dc4-98ed-246d3956604e", "secret": "some-name-mysql-init", "user": "root"} 2025-10-17T13:47:25.377Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "804d102b-9d0e-4dc4-98ed-246d3956604e"} 2025-10-17T13:47:25.401Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "804d102b-9d0e-4dc4-98ed-246d3956604e", "user": "root"} 2025-10-17T13:47:25.401Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "804d102b-9d0e-4dc4-98ed-246d3956604e", "user": "monitor"} 2025-10-17T13:47:25.413Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": 
"804d102b-9d0e-4dc4-98ed-246d3956604e", "user": "monitor"} 2025-10-17T13:47:25.433Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "804d102b-9d0e-4dc4-98ed-246d3956604e", "secret": "some-name-mysql-init", "user": "monitor"} 2025-10-17T13:47:25.452Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "804d102b-9d0e-4dc4-98ed-246d3956604e", "user": "monitor"} 2025-10-17T13:47:25.478Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "804d102b-9d0e-4dc4-98ed-246d3956604e", "user": "monitor"} 2025-10-17T13:47:25.478Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "804d102b-9d0e-4dc4-98ed-246d3956604e", "user": "xtrabackup"} 2025-10-17T13:47:25.489Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "804d102b-9d0e-4dc4-98ed-246d3956604e", "user": "xtrabackup"} 2025-10-17T13:47:25.508Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "804d102b-9d0e-4dc4-98ed-246d3956604e", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-10-17T13:47:25.535Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "804d102b-9d0e-4dc4-98ed-246d3956604e", "user": "xtrabackup"} 2025-10-17T13:47:25.535Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "804d102b-9d0e-4dc4-98ed-246d3956604e", "user": "proxyadmin"} 2025-10-17T13:47:25.555Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "804d102b-9d0e-4dc4-98ed-246d3956604e", "user": "proxyadmin"} 2025-10-17T13:47:25.575Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "804d102b-9d0e-4dc4-98ed-246d3956604e", "user": "proxyadmin"} 2025-10-17T13:47:25.575Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "804d102b-9d0e-4dc4-98ed-246d3956604e", "last-applied-secret": "2ae68cd165d76f91562789a36a54a196d2eccf055c20698bceb78c4b0d509817"} 2025-10-17T13:47:25.575Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "804d102b-9d0e-4dc4-98ed-246d3956604e", "last-applied-secret": "2ae68cd165d76f91562789a36a54a196d2eccf055c20698bceb78c4b0d509817"} 2025-10-17T13:47:25.578Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "804d102b-9d0e-4dc4-98ed-246d3956604e", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-17T13:47:25.694Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "804d102b-9d0e-4dc4-98ed-246d3956604e", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-17T13:47:27.412Z ERROR sync users {"controller": "pxc-controller", "namespace": 
"users-20579", "name": "some-name", "reconcileID": "804d102b-9d0e-4dc4-98ed-246d3956604e", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-10-17T13:47:43.526Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "8b9f5128-6634-4375-bf7e-5fc527fa1c60", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-17T13:47:43.573Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "8b9f5128-6634-4375-bf7e-5fc527fa1c60", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-17T13:47:43.636Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "8b9f5128-6634-4375-bf7e-5fc527fa1c60", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-17T13:47:43.736Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "8b9f5128-6634-4375-bf7e-5fc527fa1c60", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-17T13:47:43.804Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "8b9f5128-6634-4375-bf7e-5fc527fa1c60", "object": "some-name-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-17T13:47:45.094Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "0e54c951-b663-48e7-966e-12486e73453a", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-10-17T13:49:47.845Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "da65493d-7512-4108-92bd-3fad8ab97e46", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-20579 on 34.118.224.10:53: no such host"} 2025-10-17T13:50:30.059Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d7415783-9e67-4330-b2a7-34685ab19afd", "user": "root"} 2025-10-17T13:50:30.075Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d7415783-9e67-4330-b2a7-34685ab19afd", "user": "root"} 2025-10-17T13:50:30.096Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d7415783-9e67-4330-b2a7-34685ab19afd", "secret": "some-name-mysql-init", "user": "root"} 2025-10-17T13:50:30.119Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d7415783-9e67-4330-b2a7-34685ab19afd", "user": "root"} 
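The some-name-haproxy StatefulSet, Services, and PodDisruptionBudget created at 13:47:43 above mark the point where the test switches the cluster from ProxySQL to HAProxy; the "no such host" lookups right after are expected while the PXC pods are recreated under the new proxy. A sketch of the equivalent manual switch, assuming the usual PXC CR fields (spec.haproxy.enabled / spec.proxysql.enabled) and the labels this log's config hashes decode to:

kubectl -n users-20579 patch pxc some-name --type=merge -p '{"spec":{"haproxy":{"enabled":true},"proxysql":{"enabled":false}}}'
kubectl -n users-20579 get sts,svc -l app.kubernetes.io/component=haproxy   # the new some-name-haproxy objects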
2025-10-17T13:50:30.119Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d7415783-9e67-4330-b2a7-34685ab19afd", "user": "operator"}
2025-10-17T13:50:30.129Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d7415783-9e67-4330-b2a7-34685ab19afd", "user": "operator"}
2025-10-17T13:50:30.150Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d7415783-9e67-4330-b2a7-34685ab19afd", "secret": "some-name-mysql-init", "user": "operator"}
2025-10-17T13:50:30.175Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d7415783-9e67-4330-b2a7-34685ab19afd", "user": "operator"}
2025-10-17T13:50:30.175Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d7415783-9e67-4330-b2a7-34685ab19afd", "user": "monitor"}
2025-10-17T13:50:30.186Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d7415783-9e67-4330-b2a7-34685ab19afd", "user": "monitor"}
2025-10-17T13:50:30.215Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d7415783-9e67-4330-b2a7-34685ab19afd", "secret": "some-name-mysql-init", "user": "monitor"}
2025-10-17T13:50:30.239Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d7415783-9e67-4330-b2a7-34685ab19afd", "user": "monitor"}
2025-10-17T13:50:30.239Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d7415783-9e67-4330-b2a7-34685ab19afd", "user": "xtrabackup"}
2025-10-17T13:50:30.250Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d7415783-9e67-4330-b2a7-34685ab19afd", "user": "xtrabackup"}
2025-10-17T13:50:30.272Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d7415783-9e67-4330-b2a7-34685ab19afd", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2025-10-17T13:50:30.297Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d7415783-9e67-4330-b2a7-34685ab19afd", "user": "xtrabackup"}
2025-10-17T13:50:30.297Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d7415783-9e67-4330-b2a7-34685ab19afd", "user": "replication"}
2025-10-17T13:50:30.306Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d7415783-9e67-4330-b2a7-34685ab19afd", "user": "replication"}
2025-10-17T13:50:30.326Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d7415783-9e67-4330-b2a7-34685ab19afd", "secret": "some-name-mysql-init", "user": "replication"}
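Each User password updated record above is mirrored into the cluster's internal secret, which is what the operator itself authenticates with. To spot-check that a rotation actually landed, decoding that secret is usually enough; the internal-some-name name follows the operator's internal-<cluster> convention and is an assumption here, since the log never prints it:

kubectl -n users-20579 get secret internal-some-name -o jsonpath='{.data.replication}' | base64 -d   # compare with the value in the active user-secrets object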
"last-applied-secret": "0ec1210a11c59b9e0dbad37875c36033bb5b49f8d6019ab13f031eb19bf6bcab"} 2025-10-17T13:50:30.354Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d7415783-9e67-4330-b2a7-34685ab19afd", "user": "replication"} 2025-10-17T13:50:30.354Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d7415783-9e67-4330-b2a7-34685ab19afd", "last-applied-secret": "0ec1210a11c59b9e0dbad37875c36033bb5b49f8d6019ab13f031eb19bf6bcab"} 2025-10-17T13:50:30.356Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d7415783-9e67-4330-b2a7-34685ab19afd", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-17T13:50:30.415Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "d7415783-9e67-4330-b2a7-34685ab19afd", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-17T13:53:08.924Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "e9f60ccd-cffa-4831-b6fd-e00a0539b8bd", "user": "monitor"} 2025-10-17T13:53:08.937Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "e9f60ccd-cffa-4831-b6fd-e00a0539b8bd", "user": "monitor"} 2025-10-17T13:53:08.959Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "e9f60ccd-cffa-4831-b6fd-e00a0539b8bd", "secret": "some-name-mysql-init", "user": "monitor"} 2025-10-17T13:53:08.978Z INFO HAProxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "e9f60ccd-cffa-4831-b6fd-e00a0539b8bd", "last-applied-secret": "9e471e5155668077d37f2efce5b14a85d6718d18c5f31a2c69ff450223da8259"} 2025-10-17T13:53:08.978Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "e9f60ccd-cffa-4831-b6fd-e00a0539b8bd", "user": "monitor"} 2025-10-17T13:53:08.980Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "e9f60ccd-cffa-4831-b6fd-e00a0539b8bd", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-17T13:53:27.388Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-20579", "name": "some-name", "reconcileID": "7b5614d9-cf90-4d25-99b4-8a54cc8e0d3e", "err": "get primary pxc pod: failed to get proxy connection: invalid connection"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:296 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:421 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.3/pkg/internal/controller/controller.go:474 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:869 [mysql] 2025/10/17 13:52:36 packets.go:58 read tcp 
10.232.168.52:50998->34.118.229.84:3306: i/o timeout
[mysql] 2025/10/17 13:53:24 packets.go:58 unexpected EOF
[mysql] 2025/10/17 13:53:25 packets.go:58 unexpected EOF
[mysql] 2025/10/17 13:53:26 packets.go:58 unexpected EOF
[mysql] 2025/10/17 13:53:27 packets.go:58 unexpected EOF
[mysql] 2025/10/17 13:53:28 packets.go:58 unexpected EOF
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1
-  },
-  {
-  },
-  {
-  },
-  },
+  },
-  "0ec1210a11c59b9e0dbad37875c36033bb5b49f8d6019ab13f031eb19bf6bcab",
+  "0ec1210a11c59b9e0dbad37875c36033bb5b49f8d6019ab13f031eb19bf6bcab",
-  "1091dfe38c67a8fda024884eeae01e505751f06cb9f030a5484ad71b5ec4d9ee",
+  "1091dfe38c67a8fda024884eeae01e505751f06cb9f030a5484ad71b5ec4d9ee",
-  "2ae68cd165d76f91562789a36a54a196d2eccf055c20698bceb78c4b0d509817",
+  "2ae68cd165d76f91562789a36a54a196d2eccf055c20698bceb78c4b0d509817",
-  "66239470076d8b803c66c6ed69c31e024e0424a0a108c8047ef01b21abd3f519",
+  "66239470076d8b803c66c6ed69c31e024e0424a0a108c8047ef01b21abd3f519",
-  "9bedd67668a9ce0b3ee4b0bde86b99e8ab6068a54eb6553368f477e955cc6565",
+  "9e471e5155668077d37f2efce5b14a85d6718d18c5f31a2c69ff450223da8259",
-  Annotations: map[string]string{
+  Annotations: map[string]string{
+  APIVersion: "",
-  APIVersion: "apps/v1",
-  APIVersion: "apps/v1",
-  APIVersion: "v1",
-  Args: []string{"logrotate"},
+  AvailableReplicas: 0,
-  AvailableReplicas: 2,
-  AvailableReplicas: 3,
-  "bbbaa9f95040662ba210d8e7c4e2ddf5c723d477a45fe5cdb37deeeefba32ac3",
+  "bbbaa9f95040662ba210d8e7c4e2ddf5c723d477a45fe5cdb37deeeefba32ac3",
-  "c987768299d7cd1d20e1f0977d005ed6482ed9aaabcd5436e34f5fc863ef777b",
-  CollisionCount: &0,
+  CollisionCount: nil,
+  CreationTimestamp: v1.Time{},
-  CreationTimestamp: v1.Time{Time: s"2025-10-17 13:30:29 +0000 UTC"},
-  CreationTimestamp: v1.Time{Time: s"2025-10-17 13:47:43 +0000 UTC"},
+  CurrentReplicas: 0,
-  CurrentReplicas: 2,
-  CurrentReplicas: 3,
+  CurrentRevision: "",
-  CurrentRevision: "some-name-haproxy-7444f96c74",
-  CurrentRevision: "some-name-haproxy-9fcc74844",
-  CurrentRevision: "some-name-proxysql-565644d847",
-  CurrentRevision: "some-name-proxysql-677f78fbbc",
-  CurrentRevision: "some-name-proxysql-67cff98d84",
-  CurrentRevision: "some-name-proxysql-68fb9cbbbb",
-  CurrentRevision: "some-name-proxysql-696c7f59fc",
-  CurrentRevision: "some-name-proxysql-9bfbbd5bc",
-  CurrentRevision: "some-name-pxc-5489c96774",
-  CurrentRevision: "some-name-pxc-677895c7f4",
-  CurrentRevision: "some-name-pxc-85bdb9cc7b",
-  CurrentRevision: "some-name-pxc-bd977c59c",
-  DefaultMode: &420,
-  DefaultMode: &420,
+  DefaultMode: nil,
+  DefaultMode: nil,
+  DeprecatedServiceAccount: "",
-  DeprecatedServiceAccount: "default",
+  DNSPolicy: "",
-  DNSPolicy: "ClusterFirst",
-  EnvFrom: []v1.EnvFromSource{{SecretRef: s"&SecretEnvSource{LocalObjectRefe"...}},
-  Env: []v1.EnvVar{
-  FieldsType: "FieldsV1",
-  FieldsType: "FieldsV1",
-  FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`...,
-  FieldsV1: s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`...,
+  Generation: 0,
-  Generation: 1,
-  Generation: 2,
-  Generation: 3,
-  Generation: 4,
-  Generation: 5,
-  Generation: 6,
-  Generation: 7,
-  Generation: 8,
-  Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector",
-  Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector",
-  ImagePullPolicy: "Always",
+  "last-applied-secret": "0ec1210a11c59b9e0dbad37875c36033bb5b49f8d6019ab13f031eb19bf6bcab",
+  "last-applied-secret": "9bedd67668a9ce0b3ee4b0bde86b99e8ab6068a54eb6553368f477e955cc6565",
+  "last-applied-secret": "c987768299d7cd1d20e1f0977d005ed6482ed9aaabcd5436e34f5fc863ef777b",
+  ManagedFields: nil,
-  ManagedFields: []v1.ManagedFieldsEntry{
-  Manager: "kube-controller-manager",
-  Manager: "percona-xtradb-cluster-operator",
-  {Name: "IS_LOGCOLLECTOR", Value: "yes"},
-  {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"},
-  Name: "logrotate",
-  Name: "logs",
-  {Name: "MONITOR_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...},
-  {Name: "POD_NAMESPASE", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...},
-  {Name: "POD_NAME", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...},
-  {Name: "SERVICE_TYPE", Value: "mysql"},
+  ObservedGeneration: 0,
-  ObservedGeneration: 1,
-  ObservedGeneration: 2,
-  ObservedGeneration: 3,
-  ObservedGeneration: 4,
-  ObservedGeneration: 5,
-  ObservedGeneration: 6,
-  ObservedGeneration: 7,
-  ObservedGeneration: 8,
-  Operation: "Update",
-  Operation: "Update",
-  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsi"...,
+  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsi"...,
-  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsi"...,
+  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsi"...,
+  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsi"...,
-  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsi"...,
+  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsi"...,
-  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsi"...,
+  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsi"...,
-  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsi"...,
-  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsi"...,
+  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsi"...,
-  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsi"...,
+  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsi"...,
+  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsi"...,
-  "percona.com/last-config-hash":
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6ImhhcHJveHkiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSJ9fSwic3BlYyI6eyJ2b2x1bWVzIjpbeyJuYW1lIjoiaGFwcm94eS1jdXN0b20iLCJjb25maWdNYXAi"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMGVjMTIxMGExMWM1OWI5ZTBkYmFkMzc4NzVjMzYwMzNiYjViNDlmOGQ2MDE5YWIxM2YwMzFlYjE5YmY2YmNhYiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMmFlNjhjZDE2NWQ3NmY5MTU2Mjc4OWEzNmE1NGExOTZkMmVjY2YwNTVjMjA2OThiY2ViNzhjNGIwZDUwOTgxNyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMmFlNjhjZDE2NWQ3NmY5MTU2Mjc4OWEzNmE1NGExOTZkMmVjY2YwNTVjMjA2OThiY2ViNzhjNGIwZDUwOTgxNyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMmFlNjhjZDE2NWQ3NmY5MTU2Mjc4OWEzNmE1NGExOTZkMmVjY2YwNTVjMjA2OThiY2ViNzhjNGIwZDUwOTgxNyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMTk5LWJhYTdkYjJlIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUi
OiJsb2dzIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1sb2djb2xsZWN0b3IiLCJlbnZGcm9tIjpbeyJzZWNyZXRSZWYiOnsibmFtZSI6InNvbWUtbmFtZS1sb2ctY29sbGVjdG9yIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IkxPR19EQVRBX0RJUiIsInZhbHVlIjoiL3Zhci9saWIvbXlzcWwifSx7Im5hbWUiOiJQT0RfTkFNRVNQQVNFIiwidmFsdWVGcm9tIjp7ImZpZWxkUmVmIjp7ImZpZWxkUGF0aCI6Im1ldGFkYXRhLm5hbWVzcGFjZSJ9fX0seyJuYW1lIjoiUE9EX05BTUUiLCJ2YWx1ZUZyb20iOnsiZmllbGRSZWYiOnsiZmllbGRQYXRoIjoibWV0YWRhdGEubmFtZSJ9fX1dLCJyZXNvdXJjZXMiOnt9LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJkYXRhZGlyIiwibW91bnRQYXRoIjoiL3Zhci9saWIvbXlzcWwifV0sImltYWdlUHVsbFBvbGljeSI6IkFsd2F5cyJ9LHsibmFtZSI6ImxvZ3JvdGF0ZSIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOm1haW4tbG9nY29sbGVjdG9yIiwiYXJncyI6WyJsb2dyb3RhdGUiXSwiZW52IjpbeyJuYW1lIjoiU0VSVklDRV9UWVBFIiwidmFsdWUiOiJteXNxbCJ9LHsibmFtZSI6Ik1PTklUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJtb25pdG9yIn19fV0sInJlc291cmNlcyI6e30sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImRhdGFkaXIiLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHhjIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1weGM1LjciLCJjb21tYW5kIjpbIi92YXIvbGliL215c3FsL3B4Yy1lbnRyeXBvaW50LnNoIl0sImFyZ3MiOlsibXlzcWxkIl0sInBvcnRzIjpbeyJuYW1lIjoibXlzcWwiLCJjb250YWluZXJQb3J0IjozMzA2fSx7Im5hbWUiOiJzc3QiLCJjb250YWluZXJQb3J0Ijo0NDQ0fSx7Im5hbWUiOiJ3cml0ZS1zZXQiLCJjb250YWluZXJQb3J0Ijo0NTY3fSx7Im5hbWUiOiJpc3QiLCJjb250YWluZXJQb3J0Ijo0NTY4fSx7Im5hbWUiOiJteXNxbC1hZG1pbiIsImNvbnRhaW5lclBvcnQiOjMzMDYyfSx7Im5h"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMmFlNjhjZDE2NWQ3NmY5MTU2Mjc4OWEzNmE1NGExOTZkMmVjY2YwNTVjMjA2OThiY2ViNzhjNGIwZDUwOTgxNyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlc
m5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMTk5LWJhYTdkYjJlIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXB4YzUuNyIsImNvbW1hbmQiOlsiL3Zhci9saWIvbXlzcWwvcHhjLWVudHJ5cG9pbnQuc2giXSwiYXJncyI6WyJteXNxbGQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InNzdCIsImNvbnRhaW5lclBvcnQiOjQ0NDR9LHsibmFtZSI6IndyaXRlLXNldCIsImNvbnRhaW5lclBvcnQiOjQ1Njd9LHsibmFtZSI6ImlzdCIsImNvbnRhaW5lclBvcnQiOjQ1Njh9LHsibmFtZSI6Im15c3FsLWFkbWluIiwiY29udGFpbmVyUG9ydCI6MzMwNjJ9LHsibmFtZSI6Im15c3FseCIsImNvbnRhaW5lclBvcnQiOjMzMDYwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXB4YyIsIm9wdGlvbmFsIjp0cnVlfX1dLCJlbnYiOlt7Im5hbWUiOiJQWENfU0VSVklDRSIsInZhbHVlIjoic29tZS1uYW1lLXB4Yy11bnJlYWR5In0seyJuYW1lIjoiTU9OSVRPUl9IT1NUIiwidmFsdWUiOiIlIn0seyJuYW1lIjoiTVlTUUxfUk9PVF9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6InJvb3QifX19LHsibmFtZSI6IlhUUkFCQUNLVVBfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJ4dHJhYmFja3VwIn19fSx7Im5hbWUiOiJNT05JVE9SX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoibW9uaXRvciJ9fX0seyJuYW1lIjoiQ0xVU1RFUl9IQVNIIiwidmFsdWUiOiI0MjU0NTIwIn0seyJuYW1lIjoiT1BFUkFUT1JfQURNSU5fUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiTElWRU5FU1NfQ0hFQ0tfVElNRU9VVCIsInZhbHVlIjoiNSJ9LHsibmFtZSI6IlJFQURJTkVTU19DSEVDS19USU1FT1VUIiwidmFsdWUiOiIxNSJ9LHsibmFtZSI6IkRFRkFVTFRfQVVUSEVOVElDQVRJT05fUExVR0lOIiwidmFsdWUiOiJjYWNoaW5nX3NoYTJfcGFzc3dvcmQifSx7Im5hbWUiOiJNWVNR"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYmJiYWE5Zjk1MDQwNjYyYmEyMTBkOGU3YzRlMmRkZjVjNzIzZDQ3N2E0NWZlNWNkYjM3ZGVlZWVmYmEzMmFjMyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYmJiYWE5Zjk1MDQwNjYyYmEyMTBkOGU3YzRlMmRkZjVjNzIzZDQ3N2E0NWZlNWNkYjM3ZGVlZWVmYmEzMmFjMyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYzk4Nzc2ODI5OWQ3Y2QxZDIwZTFmMDk3N2QwMDVlZDY0ODJlZDlhYWFiY2Q1NDM2ZTM0ZjVmYzg2M2VmNzc3YiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYzk4Nzc2ODI5OWQ3Y2QxZDIwZTFmMDk3N2QwMDVlZDY0ODJlZDlhYWFiY2Q1NDM2ZTM0ZjVmYzg2M2VmNzc3YiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNv"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOWJlZGQ2NzY2OGE5Y2UwYjNlZTRiMGJkZTg2Yjk5ZThhYjYwNjhhNTRlYjY1NTMzNjhmNDc3ZTk1NWNjNjU2NSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMw"..., +  PeriodSeconds: 0, -  PeriodSeconds: 10, +  PersistentVolumeClaimRetentionPolicy: nil, -  PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", +  Phase: "", -  Phase: "Pending", +  PodManagementPolicy: "", -  PodManagementPolicy: "OrderedReady", +  Protocol: "", -  Protocol: "TCP", +  ReadyReplicas: 0, -  ReadyReplicas: 2, -  ReadyReplicas: 3, +  Replicas: 0, -  Replicas: 2, -  Replicas: &2, +  Replicas: &2, -  Replicas: 3, -  Replicas: &3, +  Replicas: &3, +  ResourceVersion: "", -  ResourceVersion: "1760707867903199005", -  ResourceVersion: "1760708049099599011", -  ResourceVersion: "1760708194312175005", -  ResourceVersion: "1760708237470591005", -  ResourceVersion: "1760708251496143005", -  ResourceVersion: "1760708407221711011", -  ResourceVersion: "1760708441801343005", -  ResourceVersion: "1760708521972415005", -  ResourceVersion: "1760708593067967005", -  ResourceVersion: "1760708714493903011", -  ResourceVersion: "1760708756370431005", -  ResourceVersion: "1760708858737583011", -  ResourceVersion: "1760708926569375012", -  ResourceVersion: "1760709024329631011", -  ResourceVersion: "1760709096134255012", +  RestartPolicy: "", -  RestartPolicy: "Always", -  RevisionHistoryLimit: &10, +  RevisionHistoryLimit: nil, +  SchedulerName: "", +  SchedulerName: "", -  SchedulerName: "default-scheduler", -  SchedulerName: "default-scheduler", +  SecurityContext: nil, -  SecurityContext: s"&PodSecurityContext{SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,SupplementalGroups:[],FSGroup:nil,RunAsGroup:nil,Sysctls:[]Sysctl{},WindowsOptions:nil,FSGroupChangePolicy:nil,SeccompProfile:nil,AppArmorProfile:nil,SupplementalGroupsPolicy:nil,SELinux"..., -  Subresource: "status", +  TerminationMessagePath: "", -  TerminationMessagePath: "/dev/termination-log", +  TerminationMessagePolicy: "", -  TerminationMessagePolicy: "File", -  Time: s"2025-10-17 13:30:29 +0000 UTC", -  Time: s"2025-10-17 13:31:07 +0000 UTC", -  Time: s"2025-10-17 13:34:09 +0000 UTC", -  Time: s"2025-10-17 13:36:13 +0000 UTC", -  Time: s"2025-10-17 13:36:34 +0000 UTC", -  Time: s"2025-10-17 13:36:44 +0000 UTC", -  Time: s"2025-10-17 13:37:17 +0000 UTC", -  Time: s"2025-10-17 13:37:30 +0000 UTC", -  Time: s"2025-10-17 13:37:31 +0000 UTC", -  Time: s"2025-10-17 13:37:32 +0000 UTC", -  Time: s"2025-10-17 13:40:07 +0000 UTC", -  Time: s"2025-10-17 13:40:16 +0000 UTC", -  Time: s"2025-10-17 13:40:41 +0000 UTC", -  Time: s"2025-10-17 13:41:30 
+0000 UTC", -  Time: s"2025-10-17 13:42:01 +0000 UTC", -  Time: s"2025-10-17 13:42:43 +0000 UTC", -  Time: s"2025-10-17 13:43:13 +0000 UTC", -  Time: s"2025-10-17 13:45:14 +0000 UTC", -  Time: s"2025-10-17 13:45:23 +0000 UTC", -  Time: s"2025-10-17 13:45:56 +0000 UTC", -  Time: s"2025-10-17 13:47:25 +0000 UTC", -  Time: s"2025-10-17 13:47:38 +0000 UTC", -  Time: s"2025-10-17 13:47:43 +0000 UTC", -  Time: s"2025-10-17 13:48:46 +0000 UTC", -  Time: s"2025-10-17 13:50:24 +0000 UTC", -  Time: s"2025-10-17 13:50:30 +0000 UTC", -  Time: s"2025-10-17 13:51:36 +0000 UTC", -  TopologySpreadConstraints: nil, +  TopologySpreadConstraints: []v1.TopologySpreadConstraint{}, +  UID: "", -  UID: "17c07f8e-34c4-4372-8677-d7b08622f6f8", -  UID: "c87e5d40-26c5-49e2-8577-65be562e2403", -  UID: "dd82c519-ab89-4f8d-8129-862054b48a8a", +  UpdatedReplicas: 0, -  UpdatedReplicas: 1, -  UpdatedReplicas: 2, -  UpdatedReplicas: 3, +  UpdateRevision: "", -  UpdateRevision: "some-name-haproxy-7444f96c74", -  UpdateRevision: "some-name-haproxy-9fcc74844", -  UpdateRevision: "some-name-proxysql-565644d847", -  UpdateRevision: "some-name-proxysql-677f78fbbc", -  UpdateRevision: "some-name-proxysql-67cff98d84", -  UpdateRevision: "some-name-proxysql-68fb9cbbbb", -  UpdateRevision: "some-name-proxysql-696c7f59fc", -  UpdateRevision: "some-name-proxysql-9bfbbd5bc", -  UpdateRevision: "some-name-pxc-5489c96774", -  UpdateRevision: "some-name-pxc-677895c7f4", -  UpdateRevision: "some-name-pxc-84968bd476", -  UpdateRevision: "some-name-pxc-85bdb9cc7b", -  UpdateRevision: "some-name-pxc-bd977c59c", +  Value: "caching_sha2_password", -  Value: "mysql_native_password", -  VolumeMode: &"Filesystem", +  VolumeMode: nil, -  VolumeMounts: []v1.VolumeMount{{Name: "datadir", MountPath: "/var/lib/mysql"}},   }    },    },    {    },    },    {    },    }, ""),    },    {    },    },    },    ... // 16 identical fields    ... // 16 identical fields    ... // 22 identical fields    ... // 2 identical fields    ... // 2 identical fields    ... // 2 identical fields    ... // 2 identical fields    ... // 3 identical elements    ... // 3 identical fields    ... // 3 identical fields    ... // 3 identical fields    ... // 4 identical fields    ... // 5 identical fields    ... // 5 identical fields    ... // 5 identical fields    ... // 6 identical fields    ... // 6 identical fields    ... // 7 identical fields    ... // 8 identical fields    ... // 9 identical fields    ... 
// 9 identical fields    AccessModes: nil,    ActiveDeadlineSeconds: nil,    Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "haproxy", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},    Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},    Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},    Annotations: map[string]string{    Args: {"haproxy"},    Args: {"mysqld"},    Args: {"proxysql", "-f", "-c", "/etc/proxysql/proxysql.cnf", ...},    AutomountServiceAccountToken: nil,    AWSElasticBlockStore: nil,    AzureFile: nil,    Capacity: nil,    Conditions: nil,    ConfigMapKeyRef: nil,    ConfigMap: &v1.ConfigMapVolumeSource{    ContainerPort: 3306,    ContainerPort: 33060,    ContainerPort: 33062,    ContainerPort: 3307,    ContainerPort: 3309,    ContainerPort: 4444,    ContainerPort: 4567,    ContainerPort: 4568,    ContainerPort: 6032,    ContainerPort: 6070,    ContainerPort: 8404,    Containers: []v1.Container{    DataSource: nil,    DataSourceRef: nil,    DeletionGracePeriodSeconds: nil,    DeletionGracePeriodSeconds: nil,    DeletionTimestamp: nil,    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-haproxy"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-proxysql"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-pxc"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-log-collector"}, Optional: &true}}},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "LIVENESS_CHECK_TIMEOUT", Value: "5"}, {Name: "READINESS_CHECK_TIMEOUT", Value: "1"}},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "OPERATOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "proxyadmin"}}}, ...},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, ...},    Env: []v1.EnvVar{    EphemeralContainers: nil,    FailureThreshold: 3,    FC: nil,    FieldPath: "metadata.name",    FieldPath: "metadata.namespace",    FieldRef: &v1.ObjectFieldSelector{    Finalizers: nil,    Finalizers: nil,    GitRepo: nil,    HostAliases: nil,    HostAliases: 
nil,    HostIP: "",    HostIPC: false,    Hostname: "",    HostPort: 0,    ImagePullPolicy: "Always",    ImagePullSecrets: nil,    InitContainers: []v1.Container{    InitialDelaySeconds: 300,    ISCSI: nil,    Items: nil,    Items: nil,    "kubectl.kubernetes.io/default-container": "haproxy",    "kubectl.kubernetes.io/default-container": "proxysql",    "kubectl.kubernetes.io/default-container": "pxc",    Labels: {"app.kubernetes.io/component": "haproxy", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},    Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},    Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},    Labels: nil,    "last-applied-secret": strings.Join({    Lifecycle: nil,    LivenessProbe: &v1.Probe{    LocalObjectReference: {Name: "auto-some-name-pxc"},    LocalObjectReference: {Name: "some-name-haproxy"},    LocalObjectReference: {Name: "some-name-pxc"},    ManagedFields: nil,    MinReadySeconds: 0,    Name: "auto-config",    {Name: "bin", VolumeSource: {EmptyDir: &{}}},    {Name: "CLUSTER_HASH", Value: "4254520"},    Name: "config",    Name: "DEFAULT_AUTHENTICATION_PLUGIN",    {Name: "haproxy-auto", VolumeSource: {EmptyDir: &{}}},    Name: "haproxy-custom",    Name: "ist",    {Name: "LIVENESS_CHECK_TIMEOUT", Value: "5"},    {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"},    {Name: "MONITOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "monitor"}}},    Name: "mysql",    Name: "mysql-admin",    Name: "mysql-init-file",    {Name: "MYSQL_NOTIFY_SOCKET", Value: "/var/lib/mysql/notify.sock"},    Name: "mysql-replicas",    {Name: "MYSQL_STATE_FILE", Value: "/var/lib/mysql/mysql.state"},    Name: "mysql-users-secret-file",    Name: "mysqlx",    {Name: "OPERATOR_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}},    Name: "POD_NAME",    Name: "POD_NAMESPASE",    Name: "proxyadm",    Name: "proxy-protocol",    {Name: "READINESS_CHECK_TIMEOUT", Value: "15"},    Name: "some-name-env-vars-haproxy",    Namespace: "users-20579",    Name: "ssl",    Name: "ssl-internal",    Name: "sst",    Name: "stats",    {Name: "tmp", VolumeSource: {EmptyDir: &{}}},    Name: "vault-keyring-secret",    Name: "write-set",    {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}},    NFS: nil,    NodeName: "",    NodeSelector: nil,    ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "last-applied-secret": "9bedd67668a9ce0b3ee4b0bde86b99e8ab6068a54eb6553368f477e955cc6565", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}},    ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", 
"app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}},    ObjectMeta: {Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "pxc", "last-applied-secret": "2ae68cd165d76f91562789a36a54a196d2eccf055c20698bceb78c4b0d509817", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}},    ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: {Name: "proxydata", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: v1.ObjectMeta{    ObjectMeta: v1.ObjectMeta{    Optional: &false,    Optional: &true,    Optional: &true,    Ordinals: nil,    OS: nil,    Overhead: nil,    OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name", UID: "8ed5a334-69d8-4e6e-a61e-c01fc47bc8d7", ...}},    OwnerReferences: nil,    "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e",    "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e",    "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a",    "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a",    "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc",    "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc",    Ports: nil,    Ports: []v1.ContainerPort{    PreemptionPolicy: nil,    ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}},    Quobyte: nil,    ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...},    Replicas: &2,    Replicas: &3,    ResizePolicy: nil,    ResourceFieldRef: nil,    Resources: {},    Resources: {Limits: {s"cpu": {i: {...}, s: "1", Format: "DecimalSI"}, s"memory": {i: {...}, s: "2G", Format: "DecimalSI"}}, Requests: {s"cpu": {i: {...}, s: "100m", Format: "DecimalSI"}, s"memory": {i: {...}, Format: "DecimalSI"}}},    SecretName: "internal-some-name",    SecretName: "some-name-env-vars-haproxy",    SecretName: "some-name-mysql-init",    SecretName: "some-name-ssl",    SecretName: "some-name-ssl-internal",    SecretName: "some-name-vault",    Secret: &v1.SecretVolumeSource{    SecurityContext: nil,    Selector: &{MatchLabels: {"app.kubernetes.io/component": "haproxy", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": 
"percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    SelfLink: "",    ServiceAccountName: "default",    ServiceName: "some-name-haproxy",    ServiceName: "some-name-proxysql-unready",    ServiceName: "some-name-pxc",    SetHostnameAsFQDN: nil,    ShareProcessNamespace: nil,    Spec: v1.PersistentVolumeClaimSpec{    Spec: v1.PodSpec{    Spec: v1.StatefulSetSpec{    StartupProbe: nil,    Status: v1.PersistentVolumeClaimStatus{    Status: v1.StatefulSetStatus{    StorageClassName: nil,    Subdomain: "",    Subdomain: "",    SuccessThreshold: 1,    Template: v1.PodTemplateSpec{    TerminationGracePeriodSeconds: &30,    TerminationGracePeriodSeconds: &600,    TerminationGracePeriodSeconds: nil,    TimeoutSeconds: 5,    Tolerations: {{Key: "node.alpha.kubernetes.io/unreachable", Operator: "Exists", Effect: "NoExecute", TolerationSeconds: &6000}},    Tolerations: nil,    TypeMeta: {},    TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"},    UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}},   &v1.StatefulSet{    Value: "",    ValueFrom: nil,    ValueFrom: &v1.EnvVarSource{    VolumeAttributesClassName: nil,    VolumeClaimTemplates: nil,    VolumeClaimTemplates: []v1.PersistentVolumeClaim{    VolumeDevices: nil,    VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...},    VolumeName: "",    VolumeSource: v1.VolumeSource{    Volumes: []v1.Volume{    VsphereVolume: nil,    WorkingDir: "", + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-20579 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.GFVvese2zL ++ mktemp + local LAST_ERR=/tmp/tmp.jYV4SeXkCd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.GFVvese2zL perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-20579 namespace + cat /tmp/tmp.jYV4SeXkCd + rm /tmp/tmp.GFVvese2zL /tmp/tmp.jYV4SeXkCd + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.y85b0ndaeo ++ mktemp + local LAST_ERR=/tmp/tmp.xAdRH2xg0m + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.y85b0ndaeo No resources found + cat /tmp/tmp.xAdRH2xg0m + rm /tmp/tmp.y85b0ndaeo /tmp/tmp.xAdRH2xg0m + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.tJt1JdhudV ++ mktemp + local LAST_ERR=/tmp/tmp.xnKvgJYqTS + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.tJt1JdhudV No resources found + cat /tmp/tmp.xnKvgJYqTS + rm /tmp/tmp.tJt1JdhudV 
/tmp/tmp.xnKvgJYqTS + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.LTj2r0KV2q ++ mktemp + local LAST_ERR=/tmp/tmp.mChMJA0X0w + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LTj2r0KV2q validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.mChMJA0X0w + rm /tmp/tmp.LTj2r0KV2q /tmp/tmp.mChMJA0X0w + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-20579 + rm -rf /tmp/tmp.vQXhoSoFAX ++ mktemp + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.yqY5YH6kET + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.Pg9oVOCVqS ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.Lbu8gQhyoc + local exit_status=0 + local LAST_ERR=/tmp/tmp.pvabTX1lrb + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-20579 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator
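[editor's note] The StatefulSet diff above turns on the operator's percona.com/last-config-hash annotation, which holds a base64-encoded JSON snapshot of the generated spec ({"replicas":...,"selector":...,"template":...}, as the decoded prefixes show). A minimal sketch for inspecting it out of band, assuming the annotation lives in the StatefulSet's metadata as the diff suggests; the object and namespace names are the ones from this run:

  # Decode the spec snapshot the operator compared against (needs jq and base64)
  kubectl get sts some-name-pxc -n users-20579 -o json \
    | jq -r '.metadata.annotations["percona.com/last-config-hash"]' \
    | base64 -d | jq .

In several of the +/- pairs above, the decoded snapshots differ only in the embedded last-applied-secret hash (e.g. 2ae68cd165d76f91... vs bbbaa9f95040662b...), which appears to be how the password rotations exercised by this users test trigger the rolling restarts recorded here.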