Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/logs/users-5-7.log WARNING: version difference between client (1.33) and server (1.31) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.33) and server (1.31) exceeds the supported minor version skew of +/-1 + create_infra users-18344 + local ns=users-18344 + '[' -n pxc-operator ']' + grep -v NAMESPACE + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-5449 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.5ucX9MWyRF ++ mktemp + local LAST_ERR=/tmp/tmp.oZP5Z74zQd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5ucX9MWyRF perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.oZP5Z74zQd + rm /tmp/tmp.5ucX9MWyRF /tmp/tmp.oZP5Z74zQd + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.hBL1ErTxdU ++ mktemp + local LAST_ERR=/tmp/tmp.iduwVxVUUw + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hBL1ErTxdU No resources found + cat /tmp/tmp.iduwVxVUUw + rm /tmp/tmp.hBL1ErTxdU /tmp/tmp.iduwVxVUUw + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.79MTUHnXrU ++ mktemp + local LAST_ERR=/tmp/tmp.ZPIAshYbP3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.79MTUHnXrU No resources found + cat /tmp/tmp.ZPIAshYbP3 + rm /tmp/tmp.79MTUHnXrU /tmp/tmp.ZPIAshYbP3 + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ awk '{print $1}' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep validate-auth + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified 
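Nearly every command in this trace runs through the same capture-and-retry wrapper: stdout and stderr go to mktemp files, the call is retried up to three times, and the captured files are printed and removed at the end. A minimal sketch of that kubectl_bin wrapper, reconstructed from the xtrace output (the real helper in e2e-tests/functions may differ in details such as the backoff):

kubectl_bin() {
    # Capture output/error of a kubectl call and retry it up to three times,
    # mirroring the LAST_OUT/LAST_ERR/exit_status pattern seen in the trace.
    local LAST_OUT LAST_ERR exit_status i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    exit_status=0
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ "$exit_status" != 0 ]; then
            sleep "$i"    # the trace shows 'sleep 0'; a short backoff is assumed here
        else
            break
        fi
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR"
    rm -f "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}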
+ : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' ++ mktemp + local LAST_OUT=/tmp/tmp.H6Z5gn0xfh ++ mktemp + local LAST_ERR=/tmp/tmp.iyHTaPvMu6 + local exit_status=0 ++ seq 0 2 ++ mktemp + local LAST_OUT=/tmp/tmp.3eUsv14d84 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator ++ mktemp + local LAST_ERR=/tmp/tmp.ltuba2SiuZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3eUsv14d84 + cat /tmp/tmp.ltuba2SiuZ + rm /tmp/tmp.3eUsv14d84 /tmp/tmp.ltuba2SiuZ + return 0 namespace "users-5449" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.H6Z5gn0xfh namespace "pxc-operator" deleted + cat /tmp/tmp.iyHTaPvMu6 + rm /tmp/tmp.H6Z5gn0xfh /tmp/tmp.iyHTaPvMu6 + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.gSKF2aZ326 ++ mktemp + local LAST_ERR=/tmp/tmp.9gJcypPD42 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.gSKF2aZ326 namespace/pxc-operator created + cat /tmp/tmp.9gJcypPD42 + rm /tmp/tmp.gSKF2aZ326 /tmp/tmp.9gJcypPD42 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.1twXNJYH3l +++ mktemp ++ local LAST_ERR=/tmp/tmp.CW7wZKUH1C ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1twXNJYH3l ++ cat /tmp/tmp.CW7wZKUH1C ++ rm /tmp/tmp.1twXNJYH3l /tmp/tmp.CW7wZKUH1C ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2036-c42c1c6c-4-cluster9 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.zu9mollFV2 ++ mktemp + local LAST_ERR=/tmp/tmp.gRhheEfWpR + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2036-c42c1c6c-4-cluster9 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zu9mollFV2 Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2036-c42c1c6c-4-cluster9" modified. 
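The wait_for_delete namespace/pxc-operator call above hides its loop body behind set +o xtrace; the printed dots and the trailing NotFound error suggest a poll-until-gone helper roughly like this (a sketch, not the verbatim function from e2e-tests/functions; the timeout value is an assumption):

wait_for_delete() {
    # Poll until the named resource no longer exists; the NotFound error seen
    # in the log is the final failing 'kubectl get' once the object is gone.
    local res="$1"
    local retries="${2:-120}"    # assumed timeout, in seconds
    echo -n "waiting for $res to be deleted"
    while kubectl get "$res" >/dev/null 2>&1; do
        echo -n .
        sleep 1
        retries=$((retries - 1))
        [ "$retries" -le 0 ] && { echo " timeout"; return 1; }
    done
}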
+ cat /tmp/tmp.gRhheEfWpR + rm /tmp/tmp.zu9mollFV2 /tmp/tmp.gRhheEfWpR + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.sdHES09to3 ++ mktemp + local LAST_ERR=/tmp/tmp.NmSEM8ICQ7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.sdHES09to3 customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.NmSEM8ICQ7 + rm /tmp/tmp.sdHES09to3 /tmp/tmp.NmSEM8ICQ7 + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/deploy/cw-rbac.yaml + kubectl_bin apply -f - + sed -e 's^namespace: .*^namespace: pxc-operator^' ++ mktemp + local LAST_OUT=/tmp/tmp.fA6jyptUdN ++ mktemp + local LAST_ERR=/tmp/tmp.jxIkEOgwAQ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.fA6jyptUdN clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.jxIkEOgwAQ + rm /tmp/tmp.fA6jyptUdN /tmp/tmp.jxIkEOgwAQ + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/deploy/cw-operator.yaml + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2036-c42c1c6c^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.pUZHQo5OYh ++ mktemp + local LAST_ERR=/tmp/tmp.WQFAVPrGH4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.pUZHQo5OYh deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.WQFAVPrGH4 + rm /tmp/tmp.pUZHQo5OYh /tmp/tmp.WQFAVPrGH4 + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.TH5bstSVzm ++ mktemp + local LAST_ERR=/tmp/tmp.cXwdiKbGYP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TH5bstSVzm pod/percona-xtradb-cluster-operator-779b89dbf-mdv77 condition met + cat /tmp/tmp.cXwdiKbGYP + rm /tmp/tmp.TH5bstSVzm /tmp/tmp.cXwdiKbGYP + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.UG8LK6oHQF +++ mktemp ++ local LAST_ERR=/tmp/tmp.hLBU9Nurox ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UG8LK6oHQF ++ cat /tmp/tmp.hLBU9Nurox ++ rm /tmp/tmp.UG8LK6oHQF /tmp/tmp.hLBU9Nurox ++ return 0 + wait_pod percona-xtradb-cluster-operator-779b89dbf-mdv77 480 pxc-operator + local pod=percona-xtradb-cluster-operator-779b89dbf-mdv77 + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-779b89dbf-mdv77 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-779b89dbf-mdv77 condition met waiting for pod/percona-xtradb-cluster-operator-779b89dbf-mdv77 to become Ready.Ok + sleep 3 + create_namespace users-18344 + local namespace=users-18344 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v 
'^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + '[' -n '' ']' + desc 'cleaned up old namespaces users-18344' ++ mktemp + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-18344 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-18344 ++ mktemp + local LAST_OUT=/tmp/tmp.3lud6D1tp5 + awk '{print$1}' + local LAST_OUT=/tmp/tmp.31nzOKrHFM ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.pmUgqCgHqm + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.SzYju4OMhS + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-18344 + xargs kubectl delete ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3lud6D1tp5 + cat /tmp/tmp.pmUgqCgHqm + rm /tmp/tmp.3lud6D1tp5 /tmp/tmp.pmUgqCgHqm + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-18344 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-18344 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.31nzOKrHFM + cat /tmp/tmp.SzYju4OMhS Error from server (NotFound): namespaces "users-18344" not found + rm /tmp/tmp.31nzOKrHFM /tmp/tmp.SzYju4OMhS + return 1 + : + wait_for_delete namespace/users-18344 + local res=namespace/users-18344 + echo -n 'waiting for namespace/users-18344 to be deleted' waiting for namespace/users-18344 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "users-18344" not found + desc 'create namespace users-18344' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-18344 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-18344 ++ mktemp + local LAST_OUT=/tmp/tmp.dRe7FQ5oBw ++ mktemp + local LAST_ERR=/tmp/tmp.WLKP7yoc05 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-18344 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dRe7FQ5oBw namespace/users-18344 created + cat /tmp/tmp.WLKP7yoc05 + rm /tmp/tmp.dRe7FQ5oBw /tmp/tmp.WLKP7yoc05 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.CzRfXKJL5G +++ mktemp ++ local LAST_ERR=/tmp/tmp.oYvb87yr7f ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CzRfXKJL5G ++ cat /tmp/tmp.oYvb87yr7f ++ rm /tmp/tmp.CzRfXKJL5G /tmp/tmp.oYvb87yr7f ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2036-c42c1c6c-4-cluster9 --namespace=users-18344 ++ mktemp + local LAST_OUT=/tmp/tmp.2xIZ0vD3GF ++ mktemp + local LAST_ERR=/tmp/tmp.F5a4Kk8RVm + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2036-c42c1c6c-4-cluster9 --namespace=users-18344 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2xIZ0vD3GF Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2036-c42c1c6c-4-cluster9" modified. 
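The spinup step that follows pipes every manifest through a chain of sed substitutions so that all images point at the build under test (the PR-2036 init image and the main-pxc5.7 variants here). A condensed sketch of that cat_config pipeline, assembled from the sed expressions visible in the trace (the real helper drives these substitutions from environment variables rather than hard-coding them):

cat_config() {
    # Rewrite a stock manifest so it references the images under test and the
    # current namespace. The sed expressions below are the ones in the trace.
    cat "$1" \
        | sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
        | sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' \
        | sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2036-c42c1c6c#' \
        | sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' \
        | sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' \
        | sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' \
        | sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' \
        | sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' \
        | sed -e 's#apply:.*#apply: Never#' \
        | sed -e 's~minio-service.#namespace~minio-service.users-18344~'
}

# Usage, as in the trace: rewrite the CR and apply it into the test namespace.
cat_config conf/some-name.yml | kubectl apply -n users-18344 -f -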
+ cat /tmp/tmp.F5a4Kk8RVm + rm /tmp/tmp.2xIZ0vD3GF /tmp/tmp.F5a4Kk8RVm + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.1r427QWrAK ++ mktemp + local LAST_ERR=/tmp/tmp.2c0zD4xqGZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1r427QWrAK secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.2c0zD4xqGZ + rm /tmp/tmp.1r427QWrAK /tmp/tmp.2c0zD4xqGZ + return 0 + desc 'create PXC cluster with 1-password secret' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster with 1-password secret ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/conf/some-name.yml '' '' /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/conf/secrets_one_pass.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/conf/secrets_one_pass.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/conf/secrets_one_pass.yml ++ mktemp + local LAST_OUT=/tmp/tmp.QMtLZy333V ++ mktemp + local LAST_ERR=/tmp/tmp.8WoV29GM23 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/conf/secrets_one_pass.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QMtLZy333V secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.8WoV29GM23 + rm /tmp/tmp.QMtLZy333V /tmp/tmp.8WoV29GM23 + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + 
/usr/bin/sed -e s~minio-service.#namespace~minio-service.users-18344~ + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + local LAST_OUT=/tmp/tmp.eHRcPIfThI + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2036-c42c1c6c#' + local LAST_ERR=/tmp/tmp.RTQP5KcUjN + local exit_status=0 + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.eHRcPIfThI deployment.apps/pxc-client created + cat /tmp/tmp.RTQP5KcUjN + rm /tmp/tmp.eHRcPIfThI /tmp/tmp.RTQP5KcUjN + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/conf/some-name.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2036-c42c1c6c#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + local LAST_OUT=/tmp/tmp.HPpL2TOd2C + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' ++ mktemp + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + local LAST_ERR=/tmp/tmp.gPY8qzqMOV + local exit_status=0 + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-18344~ + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ seq 0 2 + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HPpL2TOd2C perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.gPY8qzqMOV + rm /tmp/tmp.HPpL2TOd2C /tmp/tmp.gPY8qzqMOV + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ 
mktemp +++ local LAST_OUT=/tmp/tmp.QNd52S1AZz ++++ mktemp +++ local LAST_ERR=/tmp/tmp.cIGgwXEfml +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.QNd52S1AZz +++ cat /tmp/tmp.cIGgwXEfml +++ rm /tmp/tmp.QNd52S1AZz /tmp/tmp.cIGgwXEfml +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.OetTZivanw ++++ mktemp +++ local LAST_ERR=/tmp/tmp.asNO4B2w0r +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.OetTZivanw +++ cat /tmp/tmp.asNO4B2w0r +++ rm /tmp/tmp.OetTZivanw /tmp/tmp.asNO4B2w0r +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-18344 ++ mktemp + local LAST_OUT=/tmp/tmp.uZDRVxpQVF ++ mktemp + local LAST_ERR=/tmp/tmp.q7Ah3yhLY0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-18344 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-18344 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-18344 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.uZDRVxpQVF + cat /tmp/tmp.q7Ah3yhLY0 error: no matching resources found + rm /tmp/tmp.uZDRVxpQVF /tmp/tmp.q7Ah3yhLY0 + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo 
some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.p7peJbnghm +++ mktemp ++ local LAST_ERR=/tmp/tmp.PlDcwGwnTh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.p7peJbnghm ++ cat /tmp/tmp.PlDcwGwnTh ++ rm /tmp/tmp.p7peJbnghm /tmp/tmp.PlDcwGwnTh ++ return 0 + local 'root_pass=Av84nqo7[5WhfAg9H' + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pyewQwsYsc +++ mktemp ++ local LAST_ERR=/tmp/tmp.g2iqpugdYl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pyewQwsYsc ++ cat /tmp/tmp.g2iqpugdYl Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.pyewQwsYsc /tmp/tmp.g2iqpugdYl ++ return 0 + [[ -n '' ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''Av84nqo7[5WhfAg9H'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -p'\''Av84nqo7[5WhfAg9H'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JuSesOsTF2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.yN28uKi5OL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JuSesOsTF2 ++ cat /tmp/tmp.yN28uKi5OL ++ rm /tmp/tmp.JuSesOsTF2 
/tmp/tmp.yN28uKi5OL ++ return 0 + client_pod=pxc-client-857d976497-mch92 + wait_pod pxc-client-857d976497-mch92 + local pod=pxc-client-857d976497-mch92 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-mch92 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-mch92 condition met waiting for pod/pxc-client-857d976497-mch92 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''Av84nqo7[5WhfAg9H'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -p'\''Av84nqo7[5WhfAg9H'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7E5HJmlicK +++ mktemp ++ local LAST_ERR=/tmp/tmp.oeQNuVUvPB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7E5HJmlicK ++ cat /tmp/tmp.oeQNuVUvPB ++ rm /tmp/tmp.7E5HJmlicK /tmp/tmp.oeQNuVUvPB ++ return 0 + client_pod=pxc-client-857d976497-mch92 + wait_pod pxc-client-857d976497-mch92 + local pod=pxc-client-857d976497-mch92 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-mch92 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-mch92 condition met waiting for pod/pxc-client-857d976497-mch92 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''Av84nqo7[5WhfAg9H'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''Av84nqo7[5WhfAg9H'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''Av84nqo7[5WhfAg9H'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''Av84nqo7[5WhfAg9H'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7fXiElPMeF +++ mktemp ++ local LAST_ERR=/tmp/tmp.VQnwDFHeKy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7fXiElPMeF ++ cat /tmp/tmp.VQnwDFHeKy ++ rm /tmp/tmp.7fXiElPMeF /tmp/tmp.VQnwDFHeKy ++ return 0 + client_pod=pxc-client-857d976497-mch92 + wait_pod pxc-client-857d976497-mch92 + local pod=pxc-client-857d976497-mch92 + local max_retry=480 + local ns= ++ echo 
pxc-client-857d976497-mch92 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-mch92 condition met waiting for pod/pxc-client-857d976497-mch92 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.xYFCLSDYB2/select-1.sql ]] ++ cat /tmp/tmp.xYFCLSDYB2/select-1.sql ++ grep 'Unknown MySQL server host' + [[ -n '' ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-1.sql /tmp/tmp.xYFCLSDYB2/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''Av84nqo7[5WhfAg9H'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''Av84nqo7[5WhfAg9H'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''Av84nqo7[5WhfAg9H'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''Av84nqo7[5WhfAg9H'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0WNuyMiMsA +++ mktemp ++ local LAST_ERR=/tmp/tmp.r6WUIfNGSC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0WNuyMiMsA ++ cat /tmp/tmp.r6WUIfNGSC ++ rm /tmp/tmp.0WNuyMiMsA /tmp/tmp.r6WUIfNGSC ++ return 0 + client_pod=pxc-client-857d976497-mch92 + wait_pod pxc-client-857d976497-mch92 + local pod=pxc-client-857d976497-mch92 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-mch92 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-mch92 condition met waiting for pod/pxc-client-857d976497-mch92 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.xYFCLSDYB2/select-1.sql ]] ++ cat /tmp/tmp.xYFCLSDYB2/select-1.sql ++ grep 'Unknown MySQL server host' + [[ -n '' ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-1.sql /tmp/tmp.xYFCLSDYB2/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''Av84nqo7[5WhfAg9H'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''Av84nqo7[5WhfAg9H'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''Av84nqo7[5WhfAg9H'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''Av84nqo7[5WhfAg9H'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.y1yGbrIKBD +++ mktemp ++ local LAST_ERR=/tmp/tmp.05i9BNpaHe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.y1yGbrIKBD ++ cat /tmp/tmp.05i9BNpaHe ++ rm /tmp/tmp.y1yGbrIKBD /tmp/tmp.05i9BNpaHe ++ return 0 + client_pod=pxc-client-857d976497-mch92 + wait_pod pxc-client-857d976497-mch92 + local pod=pxc-client-857d976497-mch92 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-mch92 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-857d976497-mch92 condition met waiting for pod/pxc-client-857d976497-mch92 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.xYFCLSDYB2/select-1.sql ]] ++ cat /tmp/tmp.xYFCLSDYB2/select-1.sql ++ grep 'Unknown MySQL server host' + [[ -n '' ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-1.sql /tmp/tmp.xYFCLSDYB2/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RvVyw0KeVh +++ mktemp ++ local LAST_ERR=/tmp/tmp.7btD2Kp0UI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RvVyw0KeVh ++ cat /tmp/tmp.7btD2Kp0UI Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.RvVyw0KeVh /tmp/tmp.7btD2Kp0UI ++ return 0 + '[' '' ']' + desc 'test missing passwords were created and present in internal secrets' + set +o xtrace ----------------------------------------------------------------------------------- test missing passwords were created and present in internal secrets ----------------------------------------------------------------------------------- + empty_pwds=() + wrong_pwds=() + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking root' Checking root ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.LeIuMMXcNL +++ mktemp ++ local LAST_ERR=/tmp/tmp.5aqNoZMVun ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LeIuMMXcNL ++ cat /tmp/tmp.5aqNoZMVun ++ rm /tmp/tmp.LeIuMMXcNL /tmp/tmp.5aqNoZMVun ++ return 0 + secret_pass='Av84nqo7[5WhfAg9H' ++ getSecretData internal-some-name root ++ local secretName=internal-some-name ++ local dataKey=root ++ kubectl_bin get secrets/internal-some-name '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.y8rM4dVN1k +++ mktemp ++ local LAST_ERR=/tmp/tmp.kvEvnE6jfK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.y8rM4dVN1k ++ cat /tmp/tmp.kvEvnE6jfK ++ rm /tmp/tmp.y8rM4dVN1k /tmp/tmp.kvEvnE6jfK ++ return 0 + int_secret_pass='Av84nqo7[5WhfAg9H' + [[ -z Av84nqo7[5WhfAg9H ]] + [[ Av84nqo7[5WhfAg9H != \A\v\8\4\n\q\o\7\[\5\W\h\f\A\g\9\H ]] + [[ root != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ root ]] + [[ '' =~ root ]] + echo 'Running compare for root' Running compare for root + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''Av84nqo7[5WhfAg9H'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''Av84nqo7[5WhfAg9H'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f 
/mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''Av84nqo7[5WhfAg9H'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''Av84nqo7[5WhfAg9H'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8Zq1hXKRCg +++ mktemp ++ local LAST_ERR=/tmp/tmp.acrKYg0C8E ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8Zq1hXKRCg ++ cat /tmp/tmp.acrKYg0C8E ++ rm /tmp/tmp.8Zq1hXKRCg /tmp/tmp.acrKYg0C8E ++ return 0 + client_pod=pxc-client-857d976497-mch92 + wait_pod pxc-client-857d976497-mch92 + local pod=pxc-client-857d976497-mch92 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-mch92 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-mch92 condition met waiting for pod/pxc-client-857d976497-mch92 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.xYFCLSDYB2/select-4.sql ]] ++ cat /tmp/tmp.xYFCLSDYB2/select-4.sql ++ grep 'Unknown MySQL server host' + [[ -n '' ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4.sql /tmp/tmp.xYFCLSDYB2/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking xtrabackup' Checking xtrabackup ++ getSecretData my-cluster-secrets xtrabackup ++ local secretName=my-cluster-secrets ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.Yf50Eu3yoF +++ mktemp ++ local LAST_ERR=/tmp/tmp.htOXIgtoGL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Yf50Eu3yoF ++ cat /tmp/tmp.htOXIgtoGL ++ rm /tmp/tmp.Yf50Eu3yoF /tmp/tmp.htOXIgtoGL ++ return 0 + secret_pass='ygyPQjEdTC-AD7Crge?' ++ getSecretData internal-some-name xtrabackup ++ local secretName=internal-some-name ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.x9o5GRqIb1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ijGFuyf9j0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.x9o5GRqIb1 ++ cat /tmp/tmp.ijGFuyf9j0 ++ rm /tmp/tmp.x9o5GRqIb1 /tmp/tmp.ijGFuyf9j0 ++ return 0 + int_secret_pass='ygyPQjEdTC-AD7Crge?' + [[ -z ygyPQjEdTC-AD7Crge? ]] + [[ ygyPQjEdTC-AD7Crge? != \y\g\y\P\Q\j\E\d\T\C\-\A\D\7\C\r\g\e\? 
]] + [[ xtrabackup != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ xtrabackup ]] + [[ '' =~ xtrabackup ]] + echo 'Running compare for xtrabackup' Running compare for xtrabackup + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''ygyPQjEdTC-AD7Crge?'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''ygyPQjEdTC-AD7Crge?'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''ygyPQjEdTC-AD7Crge?'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''ygyPQjEdTC-AD7Crge?'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5gyTtoojo7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.XhVtx3gAE6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5gyTtoojo7 ++ cat /tmp/tmp.XhVtx3gAE6 ++ rm /tmp/tmp.5gyTtoojo7 /tmp/tmp.XhVtx3gAE6 ++ return 0 + client_pod=pxc-client-857d976497-mch92 + wait_pod pxc-client-857d976497-mch92 + local pod=pxc-client-857d976497-mch92 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-mch92 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-mch92 condition met waiting for pod/pxc-client-857d976497-mch92 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.xYFCLSDYB2/select-4.sql ]] ++ cat /tmp/tmp.xYFCLSDYB2/select-4.sql ++ grep 'Unknown MySQL server host' + [[ -n '' ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4.sql /tmp/tmp.xYFCLSDYB2/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking monitor' Checking monitor ++ getSecretData my-cluster-secrets monitor ++ local secretName=my-cluster-secrets ++ local dataKey=monitor ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.Lcq9i4lPfb +++ mktemp ++ local LAST_ERR=/tmp/tmp.HW1v5NQrs1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Lcq9i4lPfb ++ cat /tmp/tmp.HW1v5NQrs1 ++ rm /tmp/tmp.Lcq9i4lPfb /tmp/tmp.HW1v5NQrs1 ++ return 0 + secret_pass=monitor_password ++ getSecretData internal-some-name monitor ++ local secretName=internal-some-name ++ local dataKey=monitor ++ kubectl_bin get secrets/internal-some-name '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.JWzyG0pbGd +++ mktemp ++ local LAST_ERR=/tmp/tmp.w2woN3yN4k ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JWzyG0pbGd ++ cat /tmp/tmp.w2woN3yN4k ++ rm /tmp/tmp.JWzyG0pbGd /tmp/tmp.w2woN3yN4k ++ return 0 + int_secret_pass=monitor_password + [[ -z monitor_password ]] + [[ monitor_password != \m\o\n\i\t\o\r\_\p\a\s\s\w\o\r\d ]] + [[ monitor != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ monitor ]] + [[ '' =~ monitor ]] + echo 'Running compare for monitor' Running compare for monitor + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.c9kZstHnbl +++ mktemp ++ local LAST_ERR=/tmp/tmp.CWdchWgc4H ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.c9kZstHnbl ++ cat /tmp/tmp.CWdchWgc4H ++ rm /tmp/tmp.c9kZstHnbl /tmp/tmp.CWdchWgc4H ++ return 0 + client_pod=pxc-client-857d976497-mch92 + wait_pod pxc-client-857d976497-mch92 + local pod=pxc-client-857d976497-mch92 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-mch92 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep 
'^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-mch92 condition met waiting for pod/pxc-client-857d976497-mch92 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.xYFCLSDYB2/select-4.sql ]] ++ cat /tmp/tmp.xYFCLSDYB2/select-4.sql ++ grep 'Unknown MySQL server host' + [[ -n '' ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4.sql /tmp/tmp.xYFCLSDYB2/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking proxyadmin' Checking proxyadmin ++ getSecretData my-cluster-secrets proxyadmin ++ local secretName=my-cluster-secrets ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.faFPNCbvGR +++ mktemp ++ local LAST_ERR=/tmp/tmp.5kYF9rndyy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.faFPNCbvGR ++ cat /tmp/tmp.5kYF9rndyy ++ rm /tmp/tmp.faFPNCbvGR /tmp/tmp.5kYF9rndyy ++ return 0 + secret_pass='}$TtNRbSibuSs3@ ]] + [[ ]WYVVLCsbTu%dW>3@ != \]\W\Y\V\V\L\C\s\b\T\u\%\d\W\>\3\@ ]] + [[ operator != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ operator ]] + [[ '' =~ operator ]] + echo 'Running compare for operator' Running compare for operator + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\'']WYVVLCsbTu%dW>3@'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\'']WYVVLCsbTu%dW>3@'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\'']WYVVLCsbTu%dW>3@'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\'']WYVVLCsbTu%dW>3@'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ycf6JwTJun +++ mktemp ++ local LAST_ERR=/tmp/tmp.MK1W59mQGq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ycf6JwTJun ++ cat /tmp/tmp.MK1W59mQGq ++ rm /tmp/tmp.Ycf6JwTJun /tmp/tmp.MK1W59mQGq ++ return 0 + client_pod=pxc-client-857d976497-mch92 + wait_pod pxc-client-857d976497-mch92 + local pod=pxc-client-857d976497-mch92 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-mch92 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-mch92 condition met waiting for pod/pxc-client-857d976497-mch92 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.xYFCLSDYB2/select-4.sql ]] ++ cat /tmp/tmp.xYFCLSDYB2/select-4.sql ++ grep 'Unknown MySQL server host' + [[ -n '' ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4.sql /tmp/tmp.xYFCLSDYB2/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking replication' Checking replication ++ getSecretData my-cluster-secrets replication ++ local secretName=my-cluster-secrets ++ local dataKey=replication ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.mB2568HY0F +++ mktemp ++ local LAST_ERR=/tmp/tmp.OgmEV3EuMz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mB2568HY0F ++ cat /tmp/tmp.OgmEV3EuMz ++ rm /tmp/tmp.mB2568HY0F /tmp/tmp.OgmEV3EuMz ++ return 0 + secret_pass='GKu$pelq5ddHJ@mT6' ++ getSecretData internal-some-name replication ++ local secretName=internal-some-name ++ local dataKey=replication ++ kubectl_bin get secrets/internal-some-name '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.FZlBC4huyJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.Dn1IUsvbPd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FZlBC4huyJ ++ cat /tmp/tmp.Dn1IUsvbPd ++ rm /tmp/tmp.FZlBC4huyJ /tmp/tmp.Dn1IUsvbPd ++ return 0 + int_secret_pass='GKu$pelq5ddHJ@mT6' + [[ -z GKu$pelq5ddHJ@mT6 ]] + [[ GKu$pelq5ddHJ@mT6 != \G\K\u\$\p\e\l\q\5\d\d\H\J\@\m\T\6 ]] + [[ replication != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ replication ]] + [[ '' =~ replication ]] + echo 'Running compare for replication' Running compare for replication + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''GKu$pelq5ddHJ@mT6'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''GKu$pelq5ddHJ@mT6'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''GKu$pelq5ddHJ@mT6'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''GKu$pelq5ddHJ@mT6'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dwp9xIw2kP +++ mktemp ++ local LAST_ERR=/tmp/tmp.1q1Bgm2qh8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dwp9xIw2kP ++ cat /tmp/tmp.1q1Bgm2qh8 ++ rm /tmp/tmp.dwp9xIw2kP /tmp/tmp.1q1Bgm2qh8 ++ return 0 + client_pod=pxc-client-857d976497-mch92 + wait_pod pxc-client-857d976497-mch92 + local pod=pxc-client-857d976497-mch92 + local max_retry=480 + local ns= ++ echo 
pxc-client-857d976497-mch92 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-mch92 condition met waiting for pod/pxc-client-857d976497-mch92 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.xYFCLSDYB2/select-4.sql ]] ++ cat /tmp/tmp.xYFCLSDYB2/select-4.sql ++ grep 'Unknown MySQL server host' + [[ -n '' ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4.sql /tmp/tmp.xYFCLSDYB2/select-4.sql + [[ -n '' ]] + [[ -n '' ]] + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.BjLRmzbZF9 ++ mktemp + local LAST_ERR=/tmp/tmp.vAO6wiE0az + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BjLRmzbZF9 secret/my-cluster-secrets patched + cat /tmp/tmp.vAO6wiE0az + rm /tmp/tmp.BjLRmzbZF9 /tmp/tmp.vAO6wiE0az + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gpeEaiXifz +++ mktemp ++ local LAST_ERR=/tmp/tmp.TL4cprHdP7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gpeEaiXifz ++ cat /tmp/tmp.TL4cprHdP7 ++ rm /tmp/tmp.gpeEaiXifz /tmp/tmp.TL4cprHdP7 ++ return 0 + client_pod=pxc-client-857d976497-mch92 + wait_pod pxc-client-857d976497-mch92 + local pod=pxc-client-857d976497-mch92 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-mch92 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-mch92 condition met waiting for pod/pxc-client-857d976497-mch92 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.xYFCLSDYB2/select-4.sql ]] ++ cat /tmp/tmp.xYFCLSDYB2/select-4.sql ++ grep 'Unknown MySQL server host' + [[ -n '' ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4.sql /tmp/tmp.xYFCLSDYB2/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Ai6V5E8Z0n ++ mktemp + local LAST_ERR=/tmp/tmp.j3E3BBTttE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Ai6V5E8Z0n perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.j3E3BBTttE + rm /tmp/tmp.Ai6V5E8Z0n /tmp/tmp.j3E3BBTttE + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ttb8wQAJIF +++ mktemp ++ local LAST_ERR=/tmp/tmp.roXu4HIfh3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ttb8wQAJIF ++ cat /tmp/tmp.roXu4HIfh3 ++ rm /tmp/tmp.ttb8wQAJIF /tmp/tmp.roXu4HIfh3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
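Every per-user check in this section follows the same rotation pattern that patch_secret drives in the trace above: base64-encode the new password, patch that user's key in the Secret, give the operator its fixed 15s pause, then log in through the proxy with the new credential. A minimal standalone sketch of that pattern, using the secret name, user key and password from this run; the exec-based verification is an assumption, since the helper's real mysql invocation is hidden behind 'set +o xtrace':

  # rotate the root password the same way the trace above does
  new_pass='test-password'
  new_pass_b64=$(echo -n "${new_pass}" | base64)   # dGVzdC1wYXNzd29yZA==
  kubectl patch secret my-cluster-secrets -p="{\"data\":{\"root\": \"${new_pass_b64}\"}}"
  sleep 15   # same fixed pause the test uses before re-checking logins
  # verify the new credential through ProxySQL from the client pod
  # (pod name is the one from this run and will differ between runs)
  kubectl exec pxc-client-857d976497-mch92 -c pxc-client -- \
    mysql -h some-name-proxysql -uroot -p"${new_pass}" -e 'SHOW TABLES;'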
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ym00Ucrmcb +++ mktemp ++ local LAST_ERR=/tmp/tmp.66w9UABoE2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ym00Ucrmcb ++ cat /tmp/tmp.66w9UABoE2 ++ rm /tmp/tmp.Ym00Ucrmcb /tmp/tmp.66w9UABoE2 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.R85UUp60y0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.6HTUZAKvkK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.R85UUp60y0 ++ cat /tmp/tmp.6HTUZAKvkK ++ rm /tmp/tmp.R85UUp60y0 /tmp/tmp.6HTUZAKvkK ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.DdpEG5o8Bd ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.KjtTPwsaLe +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.DdpEG5o8Bd +++++ cat /tmp/tmp.KjtTPwsaLe +++++ rm /tmp/tmp.DdpEG5o8Bd /tmp/tmp.KjtTPwsaLe +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.BhlJnDJths ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.4KA0lsL6BJ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.BhlJnDJths +++++ cat /tmp/tmp.4KA0lsL6BJ +++++ rm /tmp/tmp.BhlJnDJths /tmp/tmp.4KA0lsL6BJ +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.p9lhBKjbfv +++ mktemp ++ local LAST_ERR=/tmp/tmp.q8dSGlvDFq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.p9lhBKjbfv ++ cat /tmp/tmp.q8dSGlvDFq ++ rm /tmp/tmp.p9lhBKjbfv /tmp/tmp.q8dSGlvDFq ++ return 0 + [[ 3 == \3 ]] + echo + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ohnwLp3Rdq ++ mktemp + local LAST_ERR=/tmp/tmp.ja7J0fclao + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ohnwLp3Rdq secret/my-cluster-secrets patched + cat /tmp/tmp.ja7J0fclao + rm /tmp/tmp.ohnwLp3Rdq /tmp/tmp.ja7J0fclao + return 0 + 
sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5z3DOcbHTG +++ mktemp ++ local LAST_ERR=/tmp/tmp.Rilc2ZzDI5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5z3DOcbHTG ++ cat /tmp/tmp.Rilc2ZzDI5 ++ rm /tmp/tmp.5z3DOcbHTG /tmp/tmp.Rilc2ZzDI5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WyEz6t6q2C +++ mktemp ++ local LAST_ERR=/tmp/tmp.rQ3rp2wDjd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WyEz6t6q2C ++ cat /tmp/tmp.rQ3rp2wDjd ++ rm /tmp/tmp.WyEz6t6q2C /tmp/tmp.rQ3rp2wDjd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1g9a5ZHYEw +++ mktemp ++ local LAST_ERR=/tmp/tmp.VhJGONZOK0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1g9a5ZHYEw ++ cat /tmp/tmp.VhJGONZOK0 ++ rm /tmp/tmp.1g9a5ZHYEw /tmp/tmp.VhJGONZOK0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kZtEAAyn8I +++ mktemp ++ local LAST_ERR=/tmp/tmp.FP5Gqk5wHF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kZtEAAyn8I ++ cat /tmp/tmp.FP5Gqk5wHF ++ rm /tmp/tmp.kZtEAAyn8I /tmp/tmp.FP5Gqk5wHF ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jwN3bScXuu +++ mktemp ++ local LAST_ERR=/tmp/tmp.y7vv6LRmNB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jwN3bScXuu ++ cat /tmp/tmp.y7vv6LRmNB ++ rm /tmp/tmp.jwN3bScXuu /tmp/tmp.y7vv6LRmNB ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.xxDgVD8uJG ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.YUpGXwJWOI +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.xxDgVD8uJG +++++ cat /tmp/tmp.YUpGXwJWOI +++++ rm /tmp/tmp.xxDgVD8uJG /tmp/tmp.YUpGXwJWOI +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.AeES7FD8bt ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.9QJqKvzWJR +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.AeES7FD8bt +++++ cat /tmp/tmp.9QJqKvzWJR +++++ rm /tmp/tmp.AeES7FD8bt /tmp/tmp.9QJqKvzWJR +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2dVRFJFjoJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.XU8toVBdXG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2dVRFJFjoJ ++ cat /tmp/tmp.XU8toVBdXG ++ rm /tmp/tmp.2dVRFJFjoJ /tmp/tmp.XU8toVBdXG ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' 
some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.xYFCLSDYB2/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-2.sql /tmp/tmp.xYFCLSDYB2/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.xYFCLSDYB2/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-2.sql /tmp/tmp.xYFCLSDYB2/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.xYFCLSDYB2/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-2.sql /tmp/tmp.xYFCLSDYB2/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.bvuoIyiq6P ++ mktemp + local LAST_ERR=/tmp/tmp.JQ7EzPJrW7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.bvuoIyiq6P perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.JQ7EzPJrW7 + rm /tmp/tmp.bvuoIyiq6P /tmp/tmp.JQ7EzPJrW7 + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.iIUXOf4mre ++ mktemp + local LAST_ERR=/tmp/tmp.Mb9gJW0wSU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.iIUXOf4mre secret/my-cluster-secrets patched + cat /tmp/tmp.Mb9gJW0wSU + rm /tmp/tmp.iIUXOf4mre /tmp/tmp.Mb9gJW0wSU + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fAXHvKsg2C +++ mktemp ++ local LAST_ERR=/tmp/tmp.jhB6KDBLBQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fAXHvKsg2C ++ cat /tmp/tmp.jhB6KDBLBQ ++ rm /tmp/tmp.fAXHvKsg2C /tmp/tmp.jhB6KDBLBQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XEaNyAxlTl +++ mktemp ++ local LAST_ERR=/tmp/tmp.2ILIvZp4we ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XEaNyAxlTl ++ cat /tmp/tmp.2ILIvZp4we ++ rm /tmp/tmp.XEaNyAxlTl /tmp/tmp.2ILIvZp4we ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
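The proxyadmin checks above run the query inside each ProxySQL pod via run_mysql_local, whose body is hidden by 'set +o xtrace'. An equivalent manual check against the ProxySQL admin interface would look roughly like the sketch below; the kubectl exec approach and the presence of a mysql client in the proxysql container are assumptions, and the expected-result file is the repo's e2e-tests/users/compare/select-2.sql referenced in the trace:

  # check the rotated proxyadmin password against the ProxySQL admin port on every pod
  for pod in some-name-proxysql-0 some-name-proxysql-1 some-name-proxysql-2; do
    kubectl exec "${pod}" -c proxysql -- \
      mysql -h127.0.0.1 -P6032 -uproxyadmin -p'test-password' -e 'SHOW TABLES;' \
      > /tmp/select-2.sql
    diff -u e2e-tests/users/compare/select-2.sql /tmp/select-2.sql
  done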
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.b3JoHUnERN +++ mktemp ++ local LAST_ERR=/tmp/tmp.m6p0z9QBHg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.b3JoHUnERN ++ cat /tmp/tmp.m6p0z9QBHg ++ rm /tmp/tmp.b3JoHUnERN /tmp/tmp.m6p0z9QBHg ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9ZKqycAosE +++ mktemp ++ local LAST_ERR=/tmp/tmp.AZgbrRqyDg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9ZKqycAosE ++ cat /tmp/tmp.AZgbrRqyDg ++ rm /tmp/tmp.9ZKqycAosE /tmp/tmp.AZgbrRqyDg ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QhoLQ1FyT7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Sjty8zmLuc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QhoLQ1FyT7 ++ cat /tmp/tmp.Sjty8zmLuc ++ rm /tmp/tmp.QhoLQ1FyT7 /tmp/tmp.Sjty8zmLuc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UYHlBHFRlR +++ mktemp ++ local LAST_ERR=/tmp/tmp.i6qiNM8gwM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UYHlBHFRlR ++ cat /tmp/tmp.i6qiNM8gwM ++ rm /tmp/tmp.UYHlBHFRlR /tmp/tmp.i6qiNM8gwM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9BRVHqbK4u +++ mktemp ++ local LAST_ERR=/tmp/tmp.PobLn3jKXb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9BRVHqbK4u ++ cat /tmp/tmp.PobLn3jKXb ++ rm /tmp/tmp.9BRVHqbK4u /tmp/tmp.PobLn3jKXb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ezmKyg1fmo +++ mktemp ++ local LAST_ERR=/tmp/tmp.HWYDfC3Q1e ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ezmKyg1fmo ++ cat /tmp/tmp.HWYDfC3Q1e ++ rm /tmp/tmp.ezmKyg1fmo /tmp/tmp.HWYDfC3Q1e ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BzRQOwX6Ys +++ mktemp ++ local LAST_ERR=/tmp/tmp.OSNqrnxgqA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BzRQOwX6Ys ++ cat /tmp/tmp.OSNqrnxgqA ++ rm /tmp/tmp.BzRQOwX6Ys /tmp/tmp.OSNqrnxgqA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8mfo35jqfD +++ mktemp ++ local LAST_ERR=/tmp/tmp.VQA78v3tdf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8mfo35jqfD ++ cat /tmp/tmp.VQA78v3tdf ++ rm /tmp/tmp.8mfo35jqfD /tmp/tmp.VQA78v3tdf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JCWquYAbjx +++ mktemp ++ local LAST_ERR=/tmp/tmp.AYg1oQB4r9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JCWquYAbjx ++ cat /tmp/tmp.AYg1oQB4r9 ++ rm /tmp/tmp.JCWquYAbjx /tmp/tmp.AYg1oQB4r9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sSPWIL5Gu9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.1k9tKvJEFx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sSPWIL5Gu9 ++ cat /tmp/tmp.1k9tKvJEFx ++ rm /tmp/tmp.sSPWIL5Gu9 /tmp/tmp.1k9tKvJEFx ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CcOggiGlrS +++ mktemp ++ local LAST_ERR=/tmp/tmp.0zt3FdgPsB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CcOggiGlrS ++ cat /tmp/tmp.0zt3FdgPsB ++ rm /tmp/tmp.CcOggiGlrS /tmp/tmp.0zt3FdgPsB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PE8xNAfNj2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.jkFinUMHZf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PE8xNAfNj2 ++ cat /tmp/tmp.jkFinUMHZf ++ rm /tmp/tmp.PE8xNAfNj2 /tmp/tmp.jkFinUMHZf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.94he1rzvAW +++ mktemp ++ local LAST_ERR=/tmp/tmp.hgWWkb09B2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.94he1rzvAW ++ cat /tmp/tmp.hgWWkb09B2 ++ rm /tmp/tmp.94he1rzvAW /tmp/tmp.hgWWkb09B2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pd8nCq1ZOt +++ mktemp ++ local LAST_ERR=/tmp/tmp.SQ2W6VXvfy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pd8nCq1ZOt ++ cat /tmp/tmp.SQ2W6VXvfy ++ rm /tmp/tmp.pd8nCq1ZOt /tmp/tmp.SQ2W6VXvfy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kiYnPT6Xyh +++ mktemp ++ local LAST_ERR=/tmp/tmp.uCIeZ8DnEA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kiYnPT6Xyh ++ cat /tmp/tmp.uCIeZ8DnEA ++ rm /tmp/tmp.kiYnPT6Xyh /tmp/tmp.uCIeZ8DnEA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bn5SxaYZkQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.3Suh7hH5Sf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bn5SxaYZkQ ++ cat /tmp/tmp.3Suh7hH5Sf ++ rm /tmp/tmp.bn5SxaYZkQ /tmp/tmp.3Suh7hH5Sf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eLpn2Ri63n +++ mktemp ++ local LAST_ERR=/tmp/tmp.3Pj0dhQSy5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eLpn2Ri63n ++ cat /tmp/tmp.3Pj0dhQSy5 ++ rm /tmp/tmp.eLpn2Ri63n /tmp/tmp.3Pj0dhQSy5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8z6g2h3RQC +++ mktemp ++ local LAST_ERR=/tmp/tmp.znwXXXn7VD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8z6g2h3RQC ++ cat /tmp/tmp.znwXXXn7VD ++ rm /tmp/tmp.8z6g2h3RQC /tmp/tmp.znwXXXn7VD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2gTWrlhvhW +++ mktemp ++ local LAST_ERR=/tmp/tmp.VGQBcLJqhh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2gTWrlhvhW ++ cat /tmp/tmp.VGQBcLJqhh ++ rm /tmp/tmp.2gTWrlhvhW /tmp/tmp.VGQBcLJqhh ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IH6mQjAmyT +++ mktemp ++ local LAST_ERR=/tmp/tmp.bgAL8RNZbO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IH6mQjAmyT ++ cat /tmp/tmp.bgAL8RNZbO ++ rm /tmp/tmp.IH6mQjAmyT /tmp/tmp.bgAL8RNZbO ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qjd2UDwwyq +++ mktemp ++ local LAST_ERR=/tmp/tmp.0XyRZ6Nn0g ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qjd2UDwwyq ++ cat /tmp/tmp.0XyRZ6Nn0g ++ rm /tmp/tmp.qjd2UDwwyq /tmp/tmp.0XyRZ6Nn0g ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.m5mfHc4gIo ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.T8NhYuiZJd +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.m5mfHc4gIo +++++ cat /tmp/tmp.T8NhYuiZJd +++++ rm /tmp/tmp.m5mfHc4gIo /tmp/tmp.T8NhYuiZJd +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.yrNspaFSif ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.mXCcANHJDm +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.yrNspaFSif +++++ cat /tmp/tmp.mXCcANHJDm +++++ rm /tmp/tmp.yrNspaFSif /tmp/tmp.mXCcANHJDm +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rAcWVnGJxV +++ mktemp ++ local LAST_ERR=/tmp/tmp.9PNpTa4hvf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rAcWVnGJxV ++ cat /tmp/tmp.9PNpTa4hvf ++ rm /tmp/tmp.rAcWVnGJxV /tmp/tmp.9PNpTa4hvf ++ return 0 + [[ 2 == \2 ]] + echo + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + 
local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' -s /tmp/tmp.xYFCLSDYB2/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-3.sql /tmp/tmp.xYFCLSDYB2/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.wKWmWLsw53 ++ mktemp + local LAST_ERR=/tmp/tmp.gYhgXujr1B + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.wKWmWLsw53 secret/my-cluster-secrets patched + cat /tmp/tmp.gYhgXujr1B + rm /tmp/tmp.wKWmWLsw53 /tmp/tmp.gYhgXujr1B + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.5qPkvRFbpc +++ mktemp ++ local LAST_ERR=/tmp/tmp.PR3eAQk0wD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5qPkvRFbpc ++ cat /tmp/tmp.PR3eAQk0wD ++ rm /tmp/tmp.5qPkvRFbpc /tmp/tmp.PR3eAQk0wD ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + echo 'Skipping dual password feature doesn'\''t work for 5.7. PXC 5.7 doesn'\''t support it!' Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it! 
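The getSecretData calls in this trace read a single key back out of a Secret with a go-template and base64-decode it; earlier in the section the same read is done against both the user-facing Secret and the operator's internal Secret to confirm the two stay in sync. A short sketch of that comparison, parameterized by user name (wait_for_password_propagation is skipped in this run because PXC 5.7 has no dual-password support, as the message above states):

  # read a user's password out of both Secrets and confirm they are in sync
  user=monitor
  pass=$(kubectl get secrets/my-cluster-secrets --template="{{.data.${user}}}" | base64 --decode)
  int_pass=$(kubectl get secrets/internal-some-name --template="{{.data.${user}}}" | base64 --decode)
  [ "${pass}" = "${int_pass}" ] || echo "password for ${user} not yet synced to the internal secret"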
+ return + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Da1PactYS5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.e85T1zBgne ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Da1PactYS5 ++ cat /tmp/tmp.e85T1zBgne ++ rm /tmp/tmp.Da1PactYS5 /tmp/tmp.e85T1zBgne ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Tpanlz7Mea +++ mktemp ++ local LAST_ERR=/tmp/tmp.9ckoQZ0faz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Tpanlz7Mea ++ cat /tmp/tmp.9ckoQZ0faz ++ rm /tmp/tmp.Tpanlz7Mea /tmp/tmp.9ckoQZ0faz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wPuvcWQk6d +++ mktemp ++ local LAST_ERR=/tmp/tmp.AT4H17NhK9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wPuvcWQk6d ++ cat /tmp/tmp.AT4H17NhK9 ++ rm /tmp/tmp.wPuvcWQk6d /tmp/tmp.AT4H17NhK9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hfiXrVFyjs +++ mktemp ++ local LAST_ERR=/tmp/tmp.CYJ81GMZVz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hfiXrVFyjs ++ cat /tmp/tmp.CYJ81GMZVz ++ rm /tmp/tmp.hfiXrVFyjs /tmp/tmp.CYJ81GMZVz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hcMg5V7bt3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.JfBpH1I0ld ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hcMg5V7bt3 ++ cat /tmp/tmp.JfBpH1I0ld ++ rm /tmp/tmp.hcMg5V7bt3 /tmp/tmp.JfBpH1I0ld ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KoYKxCbL9i +++ mktemp ++ local LAST_ERR=/tmp/tmp.kloqlcHZsy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KoYKxCbL9i ++ cat /tmp/tmp.kloqlcHZsy ++ rm /tmp/tmp.KoYKxCbL9i /tmp/tmp.kloqlcHZsy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RwYNufT5Ab +++ mktemp ++ local LAST_ERR=/tmp/tmp.OdUL6DeV0i ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RwYNufT5Ab ++ cat /tmp/tmp.OdUL6DeV0i ++ rm /tmp/tmp.RwYNufT5Ab /tmp/tmp.OdUL6DeV0i ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BiSurlAoY9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.AzfT9HhLQE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BiSurlAoY9 ++ cat /tmp/tmp.AzfT9HhLQE ++ rm /tmp/tmp.BiSurlAoY9 /tmp/tmp.AzfT9HhLQE ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.510Ndr3OT2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.sl16ywpTHl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.510Ndr3OT2 ++ cat /tmp/tmp.sl16ywpTHl ++ rm /tmp/tmp.510Ndr3OT2 /tmp/tmp.sl16ywpTHl ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.s2LLoDVBQz ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.NX37PEfvAT +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.s2LLoDVBQz +++++ cat /tmp/tmp.NX37PEfvAT +++++ rm /tmp/tmp.s2LLoDVBQz /tmp/tmp.NX37PEfvAT +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.gui73Clesx ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ioGmd7ruln +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.gui73Clesx +++++ cat /tmp/tmp.ioGmd7ruln +++++ rm /tmp/tmp.gui73Clesx /tmp/tmp.ioGmd7ruln +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local 
LAST_OUT=/tmp/tmp.7lERJHHVwC +++ mktemp ++ local LAST_ERR=/tmp/tmp.orY11ITmvV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7lERJHHVwC ++ cat /tmp/tmp.orY11ITmvV ++ rm /tmp/tmp.7lERJHHVwC /tmp/tmp.orY11ITmvV ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CiMnnzPWdB +++ mktemp ++ local LAST_ERR=/tmp/tmp.eyvcJzFCR3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CiMnnzPWdB ++ cat /tmp/tmp.eyvcJzFCR3 ++ rm /tmp/tmp.CiMnnzPWdB /tmp/tmp.eyvcJzFCR3 ++ return 0 + client_pod=pxc-client-857d976497-mch92 + wait_pod pxc-client-857d976497-mch92 + local pod=pxc-client-857d976497-mch92 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-mch92 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-mch92 condition met waiting for pod/pxc-client-857d976497-mch92 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.xYFCLSDYB2/select-4.sql ]] ++ cat /tmp/tmp.xYFCLSDYB2/select-4.sql ++ grep 'Unknown MySQL server host' + [[ -n '' ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4.sql /tmp/tmp.xYFCLSDYB2/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ePIJPmBwYD ++ mktemp + local LAST_ERR=/tmp/tmp.bggF3xSBwE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ePIJPmBwYD secret/my-cluster-secrets patched + cat /tmp/tmp.bggF3xSBwE + rm /tmp/tmp.ePIJPmBwYD /tmp/tmp.bggF3xSBwE + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OUBMCMG663 +++ mktemp ++ local LAST_ERR=/tmp/tmp.t51Ub4wrlH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OUBMCMG663 ++ cat /tmp/tmp.t51Ub4wrlH ++ rm /tmp/tmp.OUBMCMG663 /tmp/tmp.t51Ub4wrlH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uZGBsN6lf5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.L8WnphN9Iz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uZGBsN6lf5 ++ cat /tmp/tmp.L8WnphN9Iz ++ rm /tmp/tmp.uZGBsN6lf5 /tmp/tmp.L8WnphN9Iz ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SRcCCCsxOO +++ mktemp ++ local LAST_ERR=/tmp/tmp.t1540XcIBT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SRcCCCsxOO ++ cat /tmp/tmp.t1540XcIBT ++ rm /tmp/tmp.SRcCCCsxOO /tmp/tmp.t1540XcIBT ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.kPl0tfMo49 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.sB9mYbJWNz +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.kPl0tfMo49 +++++ cat /tmp/tmp.sB9mYbJWNz +++++ rm /tmp/tmp.kPl0tfMo49 /tmp/tmp.sB9mYbJWNz +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.AqZ8eSlxWi ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.1cHdwZ0nen +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.AqZ8eSlxWi +++++ cat /tmp/tmp.1cHdwZ0nen +++++ rm /tmp/tmp.AqZ8eSlxWi /tmp/tmp.1cHdwZ0nen +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qZouTSUpcl +++ mktemp ++ local LAST_ERR=/tmp/tmp.ij8VKbAaxV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qZouTSUpcl ++ cat /tmp/tmp.ij8VKbAaxV ++ rm /tmp/tmp.qZouTSUpcl /tmp/tmp.ij8VKbAaxV ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW 
TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ArMIY10zmx +++ mktemp ++ local LAST_ERR=/tmp/tmp.wZ04qZgvPr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ArMIY10zmx ++ cat /tmp/tmp.wZ04qZgvPr ++ rm /tmp/tmp.ArMIY10zmx /tmp/tmp.wZ04qZgvPr ++ return 0 + client_pod=pxc-client-857d976497-mch92 + wait_pod pxc-client-857d976497-mch92 + local pod=pxc-client-857d976497-mch92 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-mch92 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-mch92 condition met waiting for pod/pxc-client-857d976497-mch92 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.xYFCLSDYB2/select-4.sql ]] ++ cat /tmp/tmp.xYFCLSDYB2/select-4.sql ++ grep 'Unknown MySQL server host' + [[ -n '' ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4.sql /tmp/tmp.xYFCLSDYB2/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.8bpTrvogp9 ++ mktemp + local LAST_ERR=/tmp/tmp.fN3ht9MDOa + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8bpTrvogp9 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.fN3ht9MDOa + rm /tmp/tmp.8bpTrvogp9 /tmp/tmp.fN3ht9MDOa + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.seMOvoN0ck +++ mktemp ++ local LAST_ERR=/tmp/tmp.gYSOVUaAge ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.seMOvoN0ck ++ cat /tmp/tmp.gYSOVUaAge ++ rm /tmp/tmp.seMOvoN0ck /tmp/tmp.gYSOVUaAge ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
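The secretsName switch above is the only step in this section that edits the cluster spec rather than a key inside the Secret; once the patch lands, the cluster drops back to initializing while the operator rolls the pods onto the credentials from my-cluster-secrets-2. A quick way to confirm which Secret the custom resource points at while the wait loop below runs, using the same jsonpath style as the trace:

  kubectl get pxc some-name -o jsonpath='{.spec.secretsName}'   # expect: my-cluster-secrets-2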
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cbkkMEd4pt +++ mktemp ++ local LAST_ERR=/tmp/tmp.OD8xo6dciq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cbkkMEd4pt ++ cat /tmp/tmp.OD8xo6dciq ++ rm /tmp/tmp.cbkkMEd4pt /tmp/tmp.OD8xo6dciq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4zBUcqSHtG +++ mktemp ++ local LAST_ERR=/tmp/tmp.V9CN6LvuKu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4zBUcqSHtG ++ cat /tmp/tmp.V9CN6LvuKu ++ rm /tmp/tmp.4zBUcqSHtG /tmp/tmp.V9CN6LvuKu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JrnBs4mueY +++ mktemp ++ local LAST_ERR=/tmp/tmp.BQWDDuB2mI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JrnBs4mueY ++ cat /tmp/tmp.BQWDDuB2mI ++ rm /tmp/tmp.JrnBs4mueY /tmp/tmp.BQWDDuB2mI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jrHLgditdZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.LhgoxD1Yr1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jrHLgditdZ ++ cat /tmp/tmp.LhgoxD1Yr1 ++ rm /tmp/tmp.jrHLgditdZ /tmp/tmp.LhgoxD1Yr1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jRhH6kxhQu +++ mktemp ++ local LAST_ERR=/tmp/tmp.HALmWzELS8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jRhH6kxhQu ++ cat /tmp/tmp.HALmWzELS8 ++ rm /tmp/tmp.jRhH6kxhQu /tmp/tmp.HALmWzELS8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qKbqB0qVmM +++ mktemp ++ local LAST_ERR=/tmp/tmp.daQVkmtXDm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qKbqB0qVmM ++ cat /tmp/tmp.daQVkmtXDm ++ rm /tmp/tmp.qKbqB0qVmM /tmp/tmp.daQVkmtXDm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UbKUjVzrmh +++ mktemp ++ local LAST_ERR=/tmp/tmp.1NEoHp20fJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UbKUjVzrmh ++ cat /tmp/tmp.1NEoHp20fJ ++ rm /tmp/tmp.UbKUjVzrmh /tmp/tmp.1NEoHp20fJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bN1GaX7v2F +++ mktemp ++ local LAST_ERR=/tmp/tmp.XIHaPbzZR4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bN1GaX7v2F ++ cat /tmp/tmp.XIHaPbzZR4 ++ rm /tmp/tmp.bN1GaX7v2F /tmp/tmp.XIHaPbzZR4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.USVto7ixX5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.gkja1fVprv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.USVto7ixX5 ++ cat /tmp/tmp.gkja1fVprv ++ rm /tmp/tmp.USVto7ixX5 /tmp/tmp.gkja1fVprv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dQ77o0Zd5T +++ mktemp ++ local LAST_ERR=/tmp/tmp.30L9HdlMkO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dQ77o0Zd5T ++ cat /tmp/tmp.30L9HdlMkO ++ rm /tmp/tmp.dQ77o0Zd5T /tmp/tmp.30L9HdlMkO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.V8ptQH64Vc +++ mktemp ++ local LAST_ERR=/tmp/tmp.FypVyFtiLf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.V8ptQH64Vc ++ cat /tmp/tmp.FypVyFtiLf ++ rm /tmp/tmp.V8ptQH64Vc /tmp/tmp.FypVyFtiLf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dJDv8eYuXf +++ mktemp ++ local LAST_ERR=/tmp/tmp.tK2J0JmWdz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dJDv8eYuXf ++ cat /tmp/tmp.tK2J0JmWdz ++ rm /tmp/tmp.dJDv8eYuXf /tmp/tmp.tK2J0JmWdz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BNkW2Q1Q24 +++ mktemp ++ local LAST_ERR=/tmp/tmp.qxtw7USvSg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BNkW2Q1Q24 ++ cat /tmp/tmp.qxtw7USvSg ++ rm /tmp/tmp.BNkW2Q1Q24 /tmp/tmp.qxtw7USvSg ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YKPlcv3OoX +++ mktemp ++ local LAST_ERR=/tmp/tmp.EJBjJvPLQV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YKPlcv3OoX ++ cat /tmp/tmp.EJBjJvPLQV ++ rm /tmp/tmp.YKPlcv3OoX /tmp/tmp.EJBjJvPLQV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J5xVepRVCS +++ mktemp ++ local LAST_ERR=/tmp/tmp.X0t49mjW2l ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.J5xVepRVCS ++ cat /tmp/tmp.X0t49mjW2l ++ rm /tmp/tmp.J5xVepRVCS /tmp/tmp.X0t49mjW2l ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JOu0liYNv0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.4hvgXdPYCv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JOu0liYNv0 ++ cat /tmp/tmp.4hvgXdPYCv ++ rm /tmp/tmp.JOu0liYNv0 /tmp/tmp.4hvgXdPYCv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cfcaaWUx43 +++ mktemp ++ local LAST_ERR=/tmp/tmp.oAAdXmdRKr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cfcaaWUx43 ++ cat /tmp/tmp.oAAdXmdRKr ++ rm /tmp/tmp.cfcaaWUx43 /tmp/tmp.oAAdXmdRKr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zHogYFcz6j +++ mktemp ++ local LAST_ERR=/tmp/tmp.X7I5ubkvjc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zHogYFcz6j ++ cat /tmp/tmp.X7I5ubkvjc ++ rm /tmp/tmp.zHogYFcz6j /tmp/tmp.X7I5ubkvjc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6B8VsKYc7p +++ mktemp ++ local LAST_ERR=/tmp/tmp.kJlpWvNclU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6B8VsKYc7p ++ cat /tmp/tmp.kJlpWvNclU ++ rm /tmp/tmp.6B8VsKYc7p /tmp/tmp.kJlpWvNclU ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.29nVhUOcze +++ mktemp ++ local LAST_ERR=/tmp/tmp.4AqMafS1Pa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.29nVhUOcze ++ cat /tmp/tmp.4AqMafS1Pa ++ rm /tmp/tmp.29nVhUOcze /tmp/tmp.4AqMafS1Pa ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Av1QC6J3Mk ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ojfRFbTjsg +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Av1QC6J3Mk +++++ cat /tmp/tmp.ojfRFbTjsg +++++ rm /tmp/tmp.Av1QC6J3Mk /tmp/tmp.ojfRFbTjsg +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.2YFek2uRNE ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.lGycHdBJ38 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.2YFek2uRNE +++++ cat /tmp/tmp.lGycHdBJ38 +++++ rm /tmp/tmp.2YFek2uRNE /tmp/tmp.lGycHdBJ38 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bSQnkOcC0A +++ mktemp ++ local LAST_ERR=/tmp/tmp.UIidefiGbO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bSQnkOcC0A ++ cat /tmp/tmp.UIidefiGbO ++ rm /tmp/tmp.bSQnkOcC0A /tmp/tmp.UIidefiGbO ++ return 0 + [[ 2 == \2 ]] + echo + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.BBI24xTmc4 ++ mktemp + local LAST_ERR=/tmp/tmp.PEtDPOUVxQ + local exit_status=0 ++ 
seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BBI24xTmc4 secret/my-cluster-secrets-2 patched + cat /tmp/tmp.PEtDPOUVxQ + rm /tmp/tmp.BBI24xTmc4 /tmp/tmp.PEtDPOUVxQ + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pjuisUlfFL +++ mktemp ++ local LAST_ERR=/tmp/tmp.okEt8TIOKK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pjuisUlfFL ++ cat /tmp/tmp.okEt8TIOKK ++ rm /tmp/tmp.pjuisUlfFL /tmp/tmp.okEt8TIOKK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oilasFwhco +++ mktemp ++ local LAST_ERR=/tmp/tmp.dyHLEZb3ID ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oilasFwhco ++ cat /tmp/tmp.dyHLEZb3ID ++ rm /tmp/tmp.oilasFwhco /tmp/tmp.dyHLEZb3ID ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
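The patch_secret call above follows the usual Kubernetes pattern of writing a base64-encoded value into .data; roughly equivalent to the following sketch (secret name, key and value taken from the trace):

    newpass=test-password2
    encoded=$(echo -n "$newpass" | base64)        # dGVzdC1wYXNzd29yZDI=
    kubectl patch secret my-cluster-secrets-2 \
      -p "{\"data\":{\"operator\":\"$encoded\"}}"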
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9hOZJfFIj3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.TmL9ByLiIF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9hOZJfFIj3 ++ cat /tmp/tmp.TmL9ByLiIF ++ rm /tmp/tmp.9hOZJfFIj3 /tmp/tmp.TmL9ByLiIF ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4EctV5jtVV +++ mktemp ++ local LAST_ERR=/tmp/tmp.ROZtNvdoCk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4EctV5jtVV ++ cat /tmp/tmp.ROZtNvdoCk ++ rm /tmp/tmp.4EctV5jtVV /tmp/tmp.ROZtNvdoCk ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.mRBEEU5Ar9 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.mhL1l3gVsa +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.mRBEEU5Ar9 +++++ cat /tmp/tmp.mhL1l3gVsa +++++ rm /tmp/tmp.mRBEEU5Ar9 /tmp/tmp.mhL1l3gVsa +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.efh4CEKHZt ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.gLX52oxxCK +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.efh4CEKHZt +++++ cat /tmp/tmp.gLX52oxxCK +++++ rm /tmp/tmp.efh4CEKHZt /tmp/tmp.gLX52oxxCK +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4FSQcQlw6P +++ mktemp ++ local LAST_ERR=/tmp/tmp.AmizbD1cMG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4FSQcQlw6P ++ cat /tmp/tmp.AmizbD1cMG ++ rm /tmp/tmp.4FSQcQlw6P /tmp/tmp.AmizbD1cMG ++ return 0 + [[ 2 == \2 ]] + echo + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW 
TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QoYqLRYXU1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.6yDnku71ls ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QoYqLRYXU1 ++ cat /tmp/tmp.6yDnku71ls ++ rm /tmp/tmp.QoYqLRYXU1 /tmp/tmp.6yDnku71ls ++ return 0 + client_pod=pxc-client-857d976497-mch92 + wait_pod pxc-client-857d976497-mch92 + local pod=pxc-client-857d976497-mch92 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-mch92 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-mch92 condition met waiting for pod/pxc-client-857d976497-mch92 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.xYFCLSDYB2/select-4.sql ]] ++ cat /tmp/tmp.xYFCLSDYB2/select-4.sql ++ grep 'Unknown MySQL server host' + [[ -n '' ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4.sql /tmp/tmp.xYFCLSDYB2/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.eTtkWup7zv +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ge7Tm6gsPh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eTtkWup7zv ++ cat /tmp/tmp.Ge7Tm6gsPh ++ rm /tmp/tmp.eTtkWup7zv /tmp/tmp.Ge7Tm6gsPh ++ return 0 + newpass='<&yLTB]U[L*FG@w?' 
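getSecretData, as traced above, is effectively a go-template lookup on the secret followed by base64 decoding; a minimal equivalent:

    # read the root password the operator generated into my-cluster-secrets-2
    kubectl get secret my-cluster-secrets-2 --template='{{.data.root}}' | base64 --decode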
+ desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''<&yLTB]U[L*FG@w?'\'';' '-h some-name-pxc -uroot -p'\''<&yLTB]U[L*FG@w?'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''<&yLTB]U[L*FG@w?'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''<&yLTB]U[L*FG@w?'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WaPfeuaztM +++ mktemp ++ local LAST_ERR=/tmp/tmp.Zfc6UI0gME ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WaPfeuaztM ++ cat /tmp/tmp.Zfc6UI0gME ++ rm /tmp/tmp.WaPfeuaztM /tmp/tmp.Zfc6UI0gME ++ return 0 + client_pod=pxc-client-857d976497-mch92 + wait_pod pxc-client-857d976497-mch92 + local pod=pxc-client-857d976497-mch92 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-mch92 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-mch92 condition met waiting for pod/pxc-client-857d976497-mch92 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''<&yLTB]U[L*FG@w?'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''<&yLTB]U[L*FG@w?'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''<&yLTB]U[L*FG@w?'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''<&yLTB]U[L*FG@w?'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.t7edoOPUVU +++ mktemp ++ local LAST_ERR=/tmp/tmp.o20cEJnnIN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.t7edoOPUVU ++ cat /tmp/tmp.o20cEJnnIN ++ rm /tmp/tmp.t7edoOPUVU /tmp/tmp.o20cEJnnIN ++ return 0 + client_pod=pxc-client-857d976497-mch92 + wait_pod pxc-client-857d976497-mch92 + local pod=pxc-client-857d976497-mch92 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-mch92 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-mch92 condition met waiting for pod/pxc-client-857d976497-mch92 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.xYFCLSDYB2/select-4.sql ]] ++ cat /tmp/tmp.xYFCLSDYB2/select-4.sql ++ grep 'Unknown MySQL server host' + [[ -n '' ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4.sql /tmp/tmp.xYFCLSDYB2/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.uJvWGFlPrb +++ mktemp ++ local LAST_ERR=/tmp/tmp.PdUCYkvlne ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uJvWGFlPrb ++ cat /tmp/tmp.PdUCYkvlne ++ rm /tmp/tmp.uJvWGFlPrb /tmp/tmp.PdUCYkvlne ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.FtUtpefqGJ ++ mktemp + local LAST_ERR=/tmp/tmp.H5F5u9z4eW + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.FtUtpefqGJ secret/my-cluster-secrets-2 configured + cat /tmp/tmp.H5F5u9z4eW Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
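The "test new users sync" block above exercises ProxySQL user synchronisation: a user created directly on the PXC nodes should become usable through the proxy after the sync interval. A condensed sketch of that check (hosts, user and sleep taken from the trace; $rootpass stands for the decoded root password from the step above, and the mysql invocations are assumed to run inside the pxc-client pod as run_mysql does):

    # create the user on the PXC nodes ...
    mysql -h some-name-pxc -uroot -p"$rootpass" \
      -e "CREATE USER 'testsync'@'%' IDENTIFIED BY '$rootpass';"
    sleep 40    # allow time for the user to be synced into ProxySQL
    # ... then verify it can log in through ProxySQL
    mysql -h some-name-proxysql -utestsync -p"$rootpass" -e 'SHOW TABLES;'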
+ rm /tmp/tmp.FtUtpefqGJ /tmp/tmp.H5F5u9z4eW + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XkpRqVe2zG +++ mktemp ++ local LAST_ERR=/tmp/tmp.EOOZDO0CyQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XkpRqVe2zG ++ cat /tmp/tmp.EOOZDO0CyQ ++ rm /tmp/tmp.XkpRqVe2zG /tmp/tmp.EOOZDO0CyQ ++ return 0 + client_pod=pxc-client-857d976497-mch92 + wait_pod pxc-client-857d976497-mch92 + local pod=pxc-client-857d976497-mch92 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-mch92 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-mch92 condition met waiting for pod/pxc-client-857d976497-mch92 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.xYFCLSDYB2/select-4.sql ]] ++ cat /tmp/tmp.xYFCLSDYB2/select-4.sql ++ grep 'Unknown MySQL server host' + [[ -n '' ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-4.sql /tmp/tmp.xYFCLSDYB2/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_OUT=/tmp/tmp.pD5du66Szg + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-18344~ + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2036-c42c1c6c#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/conf/some-name.yml + local LAST_ERR=/tmp/tmp.0Q3PB4LcKs + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.pD5du66Szg perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.0Q3PB4LcKs + rm /tmp/tmp.pD5du66Szg /tmp/tmp.0Q3PB4LcKs + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wFMkw7V0cw +++ mktemp ++ local LAST_ERR=/tmp/tmp.EhvjYITZ7i ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wFMkw7V0cw ++ cat /tmp/tmp.EhvjYITZ7i ++ rm /tmp/tmp.wFMkw7V0cw /tmp/tmp.EhvjYITZ7i ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
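apply_config/cat_config, visible above, rewrites image references in the CR manifest on the fly before piping it to kubectl. Stripped to its essence (sed expressions and image tags copied from the trace; the repo-relative path is shortened for readability):

    cat e2e-tests/users/conf/some-name.yml \
      | sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
            -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' \
            -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' \
      | kubectl apply -f -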
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hHrqGf7byp +++ mktemp ++ local LAST_ERR=/tmp/tmp.If8CldJ8b8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hHrqGf7byp ++ cat /tmp/tmp.If8CldJ8b8 ++ rm /tmp/tmp.hHrqGf7byp /tmp/tmp.If8CldJ8b8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wQujag8M4C +++ mktemp ++ local LAST_ERR=/tmp/tmp.IY77woPCIu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wQujag8M4C ++ cat /tmp/tmp.IY77woPCIu ++ rm /tmp/tmp.wQujag8M4C /tmp/tmp.IY77woPCIu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WZKvmsOG4v +++ mktemp ++ local LAST_ERR=/tmp/tmp.HqvSPo4e4K ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WZKvmsOG4v ++ cat /tmp/tmp.HqvSPo4e4K ++ rm /tmp/tmp.WZKvmsOG4v /tmp/tmp.HqvSPo4e4K ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MxF4oed1Tg +++ mktemp ++ local LAST_ERR=/tmp/tmp.gfYjvkqNVO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MxF4oed1Tg ++ cat /tmp/tmp.gfYjvkqNVO ++ rm /tmp/tmp.MxF4oed1Tg /tmp/tmp.gfYjvkqNVO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8UN8twUDnv +++ mktemp ++ local LAST_ERR=/tmp/tmp.xNhJJIPenR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8UN8twUDnv ++ cat /tmp/tmp.xNhJJIPenR ++ rm /tmp/tmp.8UN8twUDnv /tmp/tmp.xNhJJIPenR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VwkTCj3C6u +++ mktemp ++ local LAST_ERR=/tmp/tmp.wx86PQcm6d ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VwkTCj3C6u ++ cat /tmp/tmp.wx86PQcm6d ++ rm /tmp/tmp.VwkTCj3C6u /tmp/tmp.wx86PQcm6d ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4qssc9o6tw +++ mktemp ++ local LAST_ERR=/tmp/tmp.9yOAFsVLrT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4qssc9o6tw ++ cat /tmp/tmp.9yOAFsVLrT ++ rm /tmp/tmp.4qssc9o6tw /tmp/tmp.9yOAFsVLrT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YSBzj2lYYj +++ mktemp ++ local LAST_ERR=/tmp/tmp.BdkIqJlYOZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YSBzj2lYYj ++ cat /tmp/tmp.BdkIqJlYOZ ++ rm /tmp/tmp.YSBzj2lYYj /tmp/tmp.BdkIqJlYOZ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pj72mxywZg +++ mktemp ++ local LAST_ERR=/tmp/tmp.HPkTBJZ1YP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pj72mxywZg ++ cat /tmp/tmp.HPkTBJZ1YP ++ rm /tmp/tmp.pj72mxywZg /tmp/tmp.HPkTBJZ1YP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MqlnDmqfbn +++ mktemp ++ local LAST_ERR=/tmp/tmp.V68pXC5Tcd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MqlnDmqfbn ++ cat /tmp/tmp.V68pXC5Tcd ++ rm /tmp/tmp.MqlnDmqfbn /tmp/tmp.V68pXC5Tcd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uyZY209SO4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.a2RvQfoFmP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uyZY209SO4 ++ cat /tmp/tmp.a2RvQfoFmP ++ rm /tmp/tmp.uyZY209SO4 /tmp/tmp.a2RvQfoFmP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.a37xWVIPYg +++ mktemp ++ local LAST_ERR=/tmp/tmp.CctuYUOz2G ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.a37xWVIPYg ++ cat /tmp/tmp.CctuYUOz2G ++ rm /tmp/tmp.a37xWVIPYg /tmp/tmp.CctuYUOz2G ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iZ0axzzxNB +++ mktemp ++ local LAST_ERR=/tmp/tmp.2URKNiIthG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iZ0axzzxNB ++ cat /tmp/tmp.2URKNiIthG ++ rm /tmp/tmp.iZ0axzzxNB /tmp/tmp.2URKNiIthG ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.490j6NwMBV +++ mktemp ++ local LAST_ERR=/tmp/tmp.ljkwx0Yns0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.490j6NwMBV ++ cat /tmp/tmp.ljkwx0Yns0 ++ rm /tmp/tmp.490j6NwMBV /tmp/tmp.ljkwx0Yns0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cwHIUgtqWz +++ mktemp ++ local LAST_ERR=/tmp/tmp.GggPoaplYC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cwHIUgtqWz ++ cat /tmp/tmp.GggPoaplYC ++ rm /tmp/tmp.cwHIUgtqWz /tmp/tmp.GggPoaplYC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MrylwkgVsI +++ mktemp ++ local LAST_ERR=/tmp/tmp.PywvD5HA2w ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MrylwkgVsI ++ cat /tmp/tmp.PywvD5HA2w ++ rm /tmp/tmp.MrylwkgVsI /tmp/tmp.PywvD5HA2w ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6kzrxkreTZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.elFt7uoyR9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6kzrxkreTZ ++ cat /tmp/tmp.elFt7uoyR9 ++ rm /tmp/tmp.6kzrxkreTZ /tmp/tmp.elFt7uoyR9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DEeShRJHXR +++ mktemp ++ local LAST_ERR=/tmp/tmp.HMxvxeFydm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DEeShRJHXR ++ cat /tmp/tmp.HMxvxeFydm ++ rm /tmp/tmp.DEeShRJHXR /tmp/tmp.HMxvxeFydm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eMTejuFaq1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.4Yv2vRAVze ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eMTejuFaq1 ++ cat /tmp/tmp.4Yv2vRAVze ++ rm /tmp/tmp.eMTejuFaq1 /tmp/tmp.4Yv2vRAVze ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HCvn29kZrk +++ mktemp ++ local LAST_ERR=/tmp/tmp.tSlFW8Y6JE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HCvn29kZrk ++ cat /tmp/tmp.tSlFW8Y6JE ++ rm /tmp/tmp.HCvn29kZrk /tmp/tmp.tSlFW8Y6JE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Q0rpmH3nZ5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.F2qwcJcaWD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Q0rpmH3nZ5 ++ cat /tmp/tmp.F2qwcJcaWD ++ rm /tmp/tmp.Q0rpmH3nZ5 /tmp/tmp.F2qwcJcaWD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ogy3gjiO3J +++ mktemp ++ local LAST_ERR=/tmp/tmp.4BWf1HEpuc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ogy3gjiO3J ++ cat /tmp/tmp.4BWf1HEpuc ++ rm /tmp/tmp.ogy3gjiO3J /tmp/tmp.4BWf1HEpuc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 22 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EdNi53OdAb +++ mktemp ++ local LAST_ERR=/tmp/tmp.HLlNhNfKcj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EdNi53OdAb ++ cat /tmp/tmp.HLlNhNfKcj ++ rm /tmp/tmp.EdNi53OdAb /tmp/tmp.HLlNhNfKcj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 23 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FfMWoN2T8t +++ mktemp ++ local LAST_ERR=/tmp/tmp.dIPIrY5yYP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FfMWoN2T8t ++ cat /tmp/tmp.dIPIrY5yYP ++ rm /tmp/tmp.FfMWoN2T8t /tmp/tmp.dIPIrY5yYP ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aahr6CGFyg +++ mktemp ++ local LAST_ERR=/tmp/tmp.pABpGCZbJx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aahr6CGFyg ++ cat /tmp/tmp.pABpGCZbJx ++ rm /tmp/tmp.aahr6CGFyg /tmp/tmp.pABpGCZbJx ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.nD5YkM7eH4 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.BbwmwhTTsU +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.nD5YkM7eH4 +++++ cat /tmp/tmp.BbwmwhTTsU +++++ rm /tmp/tmp.nD5YkM7eH4 /tmp/tmp.BbwmwhTTsU +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.h2qnOPQSVE +++ mktemp ++ local LAST_ERR=/tmp/tmp.oV0gdFmWZy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.h2qnOPQSVE ++ cat /tmp/tmp.oV0gdFmWZy ++ rm /tmp/tmp.h2qnOPQSVE /tmp/tmp.oV0gdFmWZy ++ return 0 + [[ 2 == \3 ]] + echo -n . .+ sleep 5 + [[ 24 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e567PzuJHi +++ mktemp ++ local LAST_ERR=/tmp/tmp.JpzXSAGCcR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.e567PzuJHi ++ cat /tmp/tmp.JpzXSAGCcR ++ rm /tmp/tmp.e567PzuJHi /tmp/tmp.JpzXSAGCcR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 25 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.L0Gv7OJYxG +++ mktemp ++ local LAST_ERR=/tmp/tmp.quFBh39YDD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.L0Gv7OJYxG ++ cat /tmp/tmp.quFBh39YDD ++ rm /tmp/tmp.L0Gv7OJYxG /tmp/tmp.quFBh39YDD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
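The polling above is what wait_cluster_consistency amounts to: wait for .status.state to become ready, then check the ready replica counts for pxc and for whichever proxy is enabled (note the iteration just before this point where state is already ready but only 2 of 3 haproxy pods are, so the loop keeps going). A simplified sketch using the same jsonpath fields as the trace, without the 300-iteration cap the test applies:

    until [[ $(kubectl get pxc some-name -o jsonpath='{.status.state}') == ready &&
              $(kubectl get pxc some-name -o jsonpath='{.status.pxc.ready}') == 3 &&
              $(kubectl get pxc some-name -o jsonpath='{.status.haproxy.ready}') == 3 ]]; do
      echo -n .
      sleep 5
    done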
.+ sleep 5 + [[ 26 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OL2Btcyw8c +++ mktemp ++ local LAST_ERR=/tmp/tmp.nOqoeHKInW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OL2Btcyw8c ++ cat /tmp/tmp.nOqoeHKInW ++ rm /tmp/tmp.OL2Btcyw8c /tmp/tmp.nOqoeHKInW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 27 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ILkPvaNRD1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.9ggAaBH1pD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ILkPvaNRD1 ++ cat /tmp/tmp.9ggAaBH1pD ++ rm /tmp/tmp.ILkPvaNRD1 /tmp/tmp.9ggAaBH1pD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 28 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w7vxZQO9vk +++ mktemp ++ local LAST_ERR=/tmp/tmp.3mzZv6z1E1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.w7vxZQO9vk ++ cat /tmp/tmp.3mzZv6z1E1 ++ rm /tmp/tmp.w7vxZQO9vk /tmp/tmp.3mzZv6z1E1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 29 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MpOxqdm1Zq +++ mktemp ++ local LAST_ERR=/tmp/tmp.IeY5RLZb5i ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MpOxqdm1Zq ++ cat /tmp/tmp.IeY5RLZb5i ++ rm /tmp/tmp.MpOxqdm1Zq /tmp/tmp.IeY5RLZb5i ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 30 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KCf7Nrqz9l +++ mktemp ++ local LAST_ERR=/tmp/tmp.DcGqDQrzmC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KCf7Nrqz9l ++ cat /tmp/tmp.DcGqDQrzmC ++ rm /tmp/tmp.KCf7Nrqz9l /tmp/tmp.DcGqDQrzmC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 31 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e7crYC92mj +++ mktemp ++ local LAST_ERR=/tmp/tmp.jcAvLaKvHo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.e7crYC92mj ++ cat /tmp/tmp.jcAvLaKvHo ++ rm /tmp/tmp.e7crYC92mj /tmp/tmp.jcAvLaKvHo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 32 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TjdoUwn2Sv +++ mktemp ++ local LAST_ERR=/tmp/tmp.p59COl8oU0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TjdoUwn2Sv ++ cat /tmp/tmp.p59COl8oU0 ++ rm /tmp/tmp.TjdoUwn2Sv /tmp/tmp.p59COl8oU0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 33 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6dk3EiDQFC +++ mktemp ++ local LAST_ERR=/tmp/tmp.U47OoE4YG5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6dk3EiDQFC ++ cat /tmp/tmp.U47OoE4YG5 ++ rm /tmp/tmp.6dk3EiDQFC /tmp/tmp.U47OoE4YG5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 34 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lc1FIEbOrN +++ mktemp ++ local LAST_ERR=/tmp/tmp.YOGPIc15zs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lc1FIEbOrN ++ cat /tmp/tmp.YOGPIc15zs ++ rm /tmp/tmp.lc1FIEbOrN /tmp/tmp.YOGPIc15zs ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 35 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LxpWtIIFN6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.BMuQcHoGif ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LxpWtIIFN6 ++ cat /tmp/tmp.BMuQcHoGif ++ rm /tmp/tmp.LxpWtIIFN6 /tmp/tmp.BMuQcHoGif ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 36 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.N52Ym8Jdck +++ mktemp ++ local LAST_ERR=/tmp/tmp.D24KM3UsPj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.N52Ym8Jdck ++ cat /tmp/tmp.D24KM3UsPj ++ rm /tmp/tmp.N52Ym8Jdck /tmp/tmp.D24KM3UsPj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 37 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cYNhvdJZRo +++ mktemp ++ local LAST_ERR=/tmp/tmp.4TYr3ffj56 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cYNhvdJZRo ++ cat /tmp/tmp.4TYr3ffj56 ++ rm /tmp/tmp.cYNhvdJZRo /tmp/tmp.4TYr3ffj56 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 38 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Hx7FGz0hyy +++ mktemp ++ local LAST_ERR=/tmp/tmp.iCHc7LsJ6G ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Hx7FGz0hyy ++ cat /tmp/tmp.iCHc7LsJ6G ++ rm /tmp/tmp.Hx7FGz0hyy /tmp/tmp.iCHc7LsJ6G ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 39 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Stj6jSvE2N +++ mktemp ++ local LAST_ERR=/tmp/tmp.r6oUsX8wcx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Stj6jSvE2N ++ cat /tmp/tmp.r6oUsX8wcx ++ rm /tmp/tmp.Stj6jSvE2N /tmp/tmp.r6oUsX8wcx ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 40 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2mBurAbaZS +++ mktemp ++ local LAST_ERR=/tmp/tmp.yuFQI37hm3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2mBurAbaZS ++ cat /tmp/tmp.yuFQI37hm3 ++ rm /tmp/tmp.2mBurAbaZS /tmp/tmp.yuFQI37hm3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 41 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2iTXY70SON +++ mktemp ++ local LAST_ERR=/tmp/tmp.UkgEZgPMnP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2iTXY70SON ++ cat /tmp/tmp.UkgEZgPMnP ++ rm /tmp/tmp.2iTXY70SON /tmp/tmp.UkgEZgPMnP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 42 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oyKvLwppaE +++ mktemp ++ local LAST_ERR=/tmp/tmp.7UCBrL7IFY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oyKvLwppaE ++ cat /tmp/tmp.7UCBrL7IFY ++ rm /tmp/tmp.oyKvLwppaE /tmp/tmp.7UCBrL7IFY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 43 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sqs71fCcjH +++ mktemp ++ local LAST_ERR=/tmp/tmp.kyF9B5lrme ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sqs71fCcjH ++ cat /tmp/tmp.kyF9B5lrme ++ rm /tmp/tmp.sqs71fCcjH /tmp/tmp.kyF9B5lrme ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 44 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GFxBSAzMXN +++ mktemp ++ local LAST_ERR=/tmp/tmp.oXBDX4O6pF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GFxBSAzMXN ++ cat /tmp/tmp.oXBDX4O6pF ++ rm /tmp/tmp.GFxBSAzMXN /tmp/tmp.oXBDX4O6pF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 45 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rHSufuk2Eu +++ mktemp ++ local LAST_ERR=/tmp/tmp.D0iBmjCVeR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rHSufuk2Eu ++ cat /tmp/tmp.D0iBmjCVeR ++ rm /tmp/tmp.rHSufuk2Eu /tmp/tmp.D0iBmjCVeR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 46 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Yx7xNf53S5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.s3imopXhYw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Yx7xNf53S5 ++ cat /tmp/tmp.s3imopXhYw ++ rm /tmp/tmp.Yx7xNf53S5 /tmp/tmp.s3imopXhYw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 47 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nSjXAbuzx2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.tHmlEhPW6C ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nSjXAbuzx2 ++ cat /tmp/tmp.tHmlEhPW6C ++ rm /tmp/tmp.nSjXAbuzx2 /tmp/tmp.tHmlEhPW6C ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 48 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OaDf0eMUhi +++ mktemp ++ local LAST_ERR=/tmp/tmp.Fd4FPG8wty ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OaDf0eMUhi ++ cat /tmp/tmp.Fd4FPG8wty ++ rm /tmp/tmp.OaDf0eMUhi /tmp/tmp.Fd4FPG8wty ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 49 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TgZZ1u6DfH +++ mktemp ++ local LAST_ERR=/tmp/tmp.lcZhDJY4jq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TgZZ1u6DfH ++ cat /tmp/tmp.lcZhDJY4jq ++ rm /tmp/tmp.TgZZ1u6DfH /tmp/tmp.lcZhDJY4jq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 50 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DHyOoufPam +++ mktemp ++ local LAST_ERR=/tmp/tmp.Rth4Pcm8b6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DHyOoufPam ++ cat /tmp/tmp.Rth4Pcm8b6 ++ rm /tmp/tmp.DHyOoufPam /tmp/tmp.Rth4Pcm8b6 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kbunaeuNy1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Mwp2XoKUip ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kbunaeuNy1 ++ cat /tmp/tmp.Mwp2XoKUip ++ rm /tmp/tmp.kbunaeuNy1 /tmp/tmp.Mwp2XoKUip ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.GAJ47aYtNG ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.aoMrmzokbl +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.GAJ47aYtNG +++++ cat /tmp/tmp.aoMrmzokbl +++++ rm /tmp/tmp.GAJ47aYtNG /tmp/tmp.aoMrmzokbl +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jdINwlXbVm +++ mktemp ++ local LAST_ERR=/tmp/tmp.5eLn0WkIwF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jdINwlXbVm ++ cat /tmp/tmp.5eLn0WkIwF ++ rm /tmp/tmp.jdINwlXbVm /tmp/tmp.5eLn0WkIwF ++ return 0 + [[ 3 == \3 ]] + echo + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + check_generation 2 haproxy some-name + local generation=2 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WPhrqAAGy8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.AwoNFcamK6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WPhrqAAGy8 ++ cat /tmp/tmp.AwoNFcamK6 ++ rm /tmp/tmp.WPhrqAAGy8 /tmp/tmp.AwoNFcamK6 ++ return 0 + current_generation=2 + [[ 2 != \2 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.bgu7MnG8jA ++ mktemp + local LAST_ERR=/tmp/tmp.3FnIqR0BFi + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 
+ set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.bgu7MnG8jA secret/my-cluster-secrets patched + cat /tmp/tmp.3FnIqR0BFi + rm /tmp/tmp.bgu7MnG8jA /tmp/tmp.3FnIqR0BFi + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TwaOWP3pNZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.DLXJGipetD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TwaOWP3pNZ ++ cat /tmp/tmp.DLXJGipetD ++ rm /tmp/tmp.TwaOWP3pNZ /tmp/tmp.DLXJGipetD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Rk2KsW0s7A +++ mktemp ++ local LAST_ERR=/tmp/tmp.qiRKwgaqpI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Rk2KsW0s7A ++ cat /tmp/tmp.qiRKwgaqpI ++ rm /tmp/tmp.Rk2KsW0s7A /tmp/tmp.qiRKwgaqpI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bhiONZmgVy +++ mktemp ++ local LAST_ERR=/tmp/tmp.fU4qFYUCuw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bhiONZmgVy ++ cat /tmp/tmp.fU4qFYUCuw ++ rm /tmp/tmp.bhiONZmgVy /tmp/tmp.fU4qFYUCuw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KM2696Z7hc +++ mktemp ++ local LAST_ERR=/tmp/tmp.5kOv6BsEFy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KM2696Z7hc ++ cat /tmp/tmp.5kOv6BsEFy ++ rm /tmp/tmp.KM2696Z7hc /tmp/tmp.5kOv6BsEFy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Hfnvwx0cKb +++ mktemp ++ local LAST_ERR=/tmp/tmp.duNa9eSf2W ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Hfnvwx0cKb ++ cat /tmp/tmp.duNa9eSf2W ++ rm /tmp/tmp.Hfnvwx0cKb /tmp/tmp.duNa9eSf2W ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
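This second wait loop is triggered by the patch_secret call traced just above, which changes the monitor password: the new password is base64-encoded into the data section of the my-cluster-secrets Secret, the operator picks up the change and restarts the affected pods, and the cluster drops back to initializing until the rollout finishes. The step boils down to the following (dGVzdC1wYXNzd29yZDI= is base64 for test-password2):

    patch_secret() {
        local secret=$1 key=$2 value=$3              # value must already be base64-encoded
        kubectl patch secret "$secret" -p="{\"data\":{\"$key\": \"$value\"}}"
    }

    patch_secret my-cluster-secrets monitor "$(echo -n 'test-password2' | base64)"
    sleep 15                                         # give the operator time to react
    wait_cluster_consistency some-name 3 3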
.+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.luZ8yWTeXF +++ mktemp ++ local LAST_ERR=/tmp/tmp.VWrOaRX4gb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.luZ8yWTeXF ++ cat /tmp/tmp.VWrOaRX4gb ++ rm /tmp/tmp.luZ8yWTeXF /tmp/tmp.VWrOaRX4gb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sjvKpGFVIQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.eSDIR5Dsyk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sjvKpGFVIQ ++ cat /tmp/tmp.eSDIR5Dsyk ++ rm /tmp/tmp.sjvKpGFVIQ /tmp/tmp.eSDIR5Dsyk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.grIHSiJkH0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.kPwz2qSLda ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.grIHSiJkH0 ++ cat /tmp/tmp.kPwz2qSLda ++ rm /tmp/tmp.grIHSiJkH0 /tmp/tmp.kPwz2qSLda ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.B5YqXC2JR6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.W09DwGy7tt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.B5YqXC2JR6 ++ cat /tmp/tmp.W09DwGy7tt ++ rm /tmp/tmp.B5YqXC2JR6 /tmp/tmp.W09DwGy7tt ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SbBwlCpfHO +++ mktemp ++ local LAST_ERR=/tmp/tmp.bwkD31kpQz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SbBwlCpfHO ++ cat /tmp/tmp.bwkD31kpQz ++ rm /tmp/tmp.SbBwlCpfHO /tmp/tmp.bwkD31kpQz ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.KpLMorSJTY ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.j87UXQntvC +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.KpLMorSJTY +++++ cat /tmp/tmp.j87UXQntvC +++++ rm /tmp/tmp.KpLMorSJTY /tmp/tmp.j87UXQntvC +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sbyoBvcwbd +++ mktemp 
++ local LAST_ERR=/tmp/tmp.uFguyP9Tle ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sbyoBvcwbd ++ cat /tmp/tmp.uFguyP9Tle ++ rm /tmp/tmp.sbyoBvcwbd /tmp/tmp.uFguyP9Tle ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-3-57.sql ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gWgiu6ybHJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.kojEMEtZTy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gWgiu6ybHJ ++ cat /tmp/tmp.kojEMEtZTy ++ rm /tmp/tmp.gWgiu6ybHJ /tmp/tmp.kojEMEtZTy ++ return 0 + client_pod=pxc-client-857d976497-mch92 + wait_pod pxc-client-857d976497-mch92 + local pod=pxc-client-857d976497-mch92 + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-mch92 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-mch92 condition met waiting for pod/pxc-client-857d976497-mch92 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.xYFCLSDYB2/select-3.sql ]] ++ cat /tmp/tmp.xYFCLSDYB2/select-3.sql ++ grep 'Unknown MySQL server host' + [[ -n '' ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2036/e2e-tests/users/compare/select-3.sql /tmp/tmp.xYFCLSDYB2/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + check_generation 3 haproxy some-name + local generation=3 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cUIx8G1ehm +++ mktemp ++ local LAST_ERR=/tmp/tmp.zXTHLmeiUZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cUIx8G1ehm ++ cat /tmp/tmp.zXTHLmeiUZ ++ rm /tmp/tmp.cUIx8G1ehm /tmp/tmp.zXTHLmeiUZ ++ return 0 + current_generation=3 + [[ 3 != \3 ]] + destroy users-18344 + local namespace=users-18344 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'get backup status: Job.batch' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + tee /tmp/tmp.xYFCLSDYB2/operator.log + sort -u + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + grep -v 'the object has been modified' +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.QN9T2qcfl4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.KSMd97BGwi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QN9T2qcfl4 ++ cat /tmp/tmp.KSMd97BGwi ++ rm /tmp/tmp.QN9T2qcfl4 /tmp/tmp.KSMd97BGwi ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-779b89dbf-mdv77 ++ mktemp + local LAST_OUT=/tmp/tmp.2ampy1IBfd ++ mktemp + local LAST_ERR=/tmp/tmp.SVYmsFttRe + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-779b89dbf-mdv77 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2ampy1IBfd + cat /tmp/tmp.SVYmsFttRe + rm /tmp/tmp.2ampy1IBfd /tmp/tmp.SVYmsFttRe + return 0 } }, }, { }, }, { }, }, ""), }, { }, }, }, - }, - { - }, - { - }, - }, + }, ... // 15 identical fields - "16bb979cde24ab00230c0991431834b9042c2723e7b4de2ae5e801dfa30188da", + "16bb979cde24ab00230c0991431834b9042c2723e7b4de2ae5e801dfa30188da", ... 
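The scraps of +/- object-diff output around this point, and the timestamped entries that follow, are the operator log as collected by the destroy step traced just above: kubectl logs from the operator pod is run through several grep -v filters and a sed that strips the numeric "ts" field, de-duplicated with sort -u, and written via tee to operator.log. That filtering is why the operator's multi-line DEBUG diffs survive only as detached fragments. Approximately, since the exact order of the pipes is not recoverable from the xtrace output:

    operator_pod=$(kubectl get pods -n pxc-operator \
        --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator \
        -o 'jsonpath={.items[].metadata.name}')
    kubectl logs -n pxc-operator "$operator_pod" \
        | grep -v level=info \
        | grep -v 'get backup status: Job.batch' \
        | grep -v 'the object has been modified' \
        | /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
        | sort -u \
        | tee /tmp/tmp.xYFCLSDYB2/operator.log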
// 16 identical fields - "1df2bef22f75486a15612307b81b909771aa757971d27a9783e8717b1d60cc1", 2025-07-29T21:43:26.601Z INFO setup Manager starting up {"gitCommit": "c42c1c6c037dce278a252d0128405e7eaf4456e3", "gitBranch": "PR-2036-c42c1c6c", "buildTime": "2025-07-29T19:17:53Z", "goVersion": "go1.24.5", "os": "linux", "arch": "amd64"} 2025-07-29T21:43:26.601Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.11-gke.1002000"} 2025-07-29T21:43:26.605Z INFO setup Registering Components. 2025-07-29T21:43:27.989Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-07-29T21:43:27.989Z INFO controller-runtime.metrics Starting metrics server 2025-07-29T21:43:27.989Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-07-29T21:43:27.989Z INFO controller-runtime.webhook Starting webhook server 2025-07-29T21:43:27.989Z INFO setup Starting the Cmd. 2025-07-29T21:43:27.989Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-07-29T21:43:27.990Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"interval": "10s"} 2025-07-29T21:43:27.990Z INFO controller-runtime.certwatcher Updated current TLS certificate 2025-07-29T21:43:27.990Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-07-29T21:43:28.089Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 2025-07-29T21:43:28.125Z DEBUG events percona-xtradb-cluster-operator-779b89dbf-mdv77_f5657093-70fd-4c60-8f85-8ff5205d2e5a became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"715ddf1b-5ee6-4775-9fce-6d6b24fe62c3","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1753825408118927009"}, "reason": "LeaderElection"} 2025-07-29T21:43:28.125Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-07-29T21:43:28.125Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-07-29T21:43:28.125Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.Secret"} 2025-07-29T21:43:28.125Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-07-29T21:43:28.125Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2025-07-29T21:43:28.226Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2025-07-29T21:43:28.226Z INFO Starting Controller {"controller": "pxc-controller"} 2025-07-29T21:43:28.226Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2025-07-29T21:43:28.226Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2025-07-29T21:43:28.226Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2025-07-29T21:43:28.226Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2025-07-29T21:44:07.915Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "955a06be-d491-4c63-b518-9d277fc93112", "version": "1.18.0"} 2025-07-29T21:44:08.181Z INFO User secrets updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "955a06be-d491-4c63-b518-9d277fc93112", "secrets": "my-cluster-secrets"} 2025-07-29T21:44:08.410Z DEBUG Creating object 
{"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "955a06be-d491-4c63-b518-9d277fc93112", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-07-29T21:44:08.429Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "955a06be-d491-4c63-b518-9d277fc93112", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-07-29T21:44:09.003Z ERROR Reconciler error {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "955a06be-d491-4c63-b518-9d277fc93112", "error": "failed to deploy proxysql: updatePod for proxysql: reconcile config: reconcile autotune config: create or update configmap: configmaps \"auto-some-name-pxc\" already exists", "errorVerbose": "configmaps \"auto-some-name-pxc\" already exists\ncreate or update configmap\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileAutotuneConfigMap\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:92\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:25\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:48\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:119\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:340\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:202\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nreconcile autotune 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:27\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:48\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:119\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:340\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:202\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nreconcile 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:50\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:119\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:340\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:202\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nupdatePod for proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:578\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:119\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:340\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:202\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nfailed to deploy 
proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:596\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:119\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:340\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:202\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-07-29T21:44:09.129Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "84c01c96-d04d-461b-b4f8-4f6d21763045", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-07-29T21:44:09.187Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "84c01c96-d04d-461b-b4f8-4f6d21763045", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-07-29T21:44:09.254Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "84c01c96-d04d-461b-b4f8-4f6d21763045", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-07-29T21:44:09.335Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "84c01c96-d04d-461b-b4f8-4f6d21763045", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-07-29T21:44:09.451Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "84c01c96-d04d-461b-b4f8-4f6d21763045", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-07-29T21:44:09.644Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "84c01c96-d04d-461b-b4f8-4f6d21763045", "object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-07-29T21:44:10.239Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "d770a6ab-0214-4e2d-8d90-236c550dba4e", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-07-29T21:44:10.262Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "d770a6ab-0214-4e2d-8d90-236c550dba4e", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-07-29T21:45:27.267Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-18344", "name": 
"some-name", "reconcileID": "1f1342f9-8c8f-4e08-989a-f7c0706e0788", "user": "operator"} 2025-07-29T21:45:27.296Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "1f1342f9-8c8f-4e08-989a-f7c0706e0788", "user": "monitor"} 2025-07-29T21:45:27.329Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "1f1342f9-8c8f-4e08-989a-f7c0706e0788"} 2025-07-29T21:45:27.370Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "1f1342f9-8c8f-4e08-989a-f7c0706e0788", "user": "xtrabackup"} 2025-07-29T21:45:27.406Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "1f1342f9-8c8f-4e08-989a-f7c0706e0788"} 2025-07-29T21:45:27.420Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "1f1342f9-8c8f-4e08-989a-f7c0706e0788", "err": "get primary pxc pod: not found"} 2025-07-29T21:45:32.274Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "4212ece3-8117-4add-9179-e2747037aea4", "err": "get primary pxc pod: not found"} 2025-07-29T21:45:37.437Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "c1b26c08-8e6e-493e-b0c8-41c7750f8a5a", "err": "get primary pxc pod: not found"} 2025-07-29T21:45:42.616Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "dbccddc3-3ef4-4c95-80f7-33ae3960c8d9", "err": "get primary pxc pod: not found"} 2025-07-29T21:47:54.262Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "5dfd8e10-cd88-48e5-8ead-6a74292246b5", "user": "root"} 2025-07-29T21:47:54.320Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "5dfd8e10-cd88-48e5-8ead-6a74292246b5", "user": "replication"} 2025-07-29T21:47:54.376Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "5dfd8e10-cd88-48e5-8ead-6a74292246b5", "new version": "5.7.44-48-57"} 2025-07-29T21:47:56.072Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "5dfd8e10-cd88-48e5-8ead-6a74292246b5"} 2025-07-29T21:48:00.960Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "01e3ea01-cb85-46b3-ae31-e30d711bff59"} 2025-07-29T21:48:06.204Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "9f49cded-ab02-40b9-b942-bb57992db102"} 2025-07-29T21:48:11.508Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "9a0ea55b-4631-4f6a-9329-bacf36373f3d"} 2025-07-29T21:48:16.700Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "04c82817-2d7b-4262-aec8-ff32248bb387"} 
2025-07-29T21:48:22.386Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "bc3bb9f6-9009-4aa2-816f-a29316d1b78d"} 2025-07-29T21:48:27.265Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "c4c20d31-e3e3-442e-9ca5-1fc4fa2f3351"} 2025-07-29T21:48:32.471Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "c7c16542-8127-4618-b888-b4b7ab142e7e"} 2025-07-29T21:48:38.761Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "94d4eff8-32fb-4970-b17f-901578a01967"} 2025-07-29T21:48:44.092Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "e1dcd689-e971-4440-b537-2658990bb9db"} 2025-07-29T21:48:49.195Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "6dfc36a7-5528-4b7c-86a2-f2d0a7c5a3d6"} 2025-07-29T21:48:54.672Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "f991d66e-c5e7-4d8a-8ef7-f6636ecf748e"} 2025-07-29T21:48:59.810Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "901d2f82-71b4-41d3-b7ef-1cfd1fa5c566"} 2025-07-29T21:49:05.776Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "ba664efc-5179-48a2-9ce3-d6dfd483cc96"} 2025-07-29T21:49:11.400Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "a8d8482a-79ba-4ac6-b3a1-b5cb87b0c1a1"} 2025-07-29T21:49:17.604Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "1404a1f0-42a8-412f-8351-70bf861e9c18"} 2025-07-29T21:49:23.682Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "81d5c66c-d00c-43e1-b832-85ebc140a1e8"} 2025-07-29T21:49:29.202Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "60da927f-f126-4c9e-92c5-a4d42673b601"} 2025-07-29T21:49:34.206Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "4a095253-b073-4fc9-88c3-6dff52acb40d"} 2025-07-29T21:49:39.605Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "91856bca-4c3b-4c18-81a6-389a22ce7978"} 2025-07-29T21:49:42.059Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "adc2f296-b015-48d1-a84e-71c9ed38f101", "user": "root"} 2025-07-29T21:49:42.100Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "adc2f296-b015-48d1-a84e-71c9ed38f101", "user": "root"} 2025-07-29T21:49:42.118Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", 
"reconcileID": "adc2f296-b015-48d1-a84e-71c9ed38f101", "secret": "some-name-mysql-init", "user": "root"} 2025-07-29T21:49:44.605Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "adc2f296-b015-48d1-a84e-71c9ed38f101"} 2025-07-29T21:49:44.630Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "adc2f296-b015-48d1-a84e-71c9ed38f101", "user": "root"} 2025-07-29T21:49:46.372Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "adc2f296-b015-48d1-a84e-71c9ed38f101"} 2025-07-29T21:49:52.493Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "f3c82d45-02b5-4ba3-8e20-56eecac9ca5b"} 2025-07-29T21:49:56.959Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "21284c36-19ad-4711-950e-09e236423f91"} 2025-07-29T21:50:02.189Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "b0e9a3f4-73e2-43f5-a5d5-e8034609099c"} 2025-07-29T21:50:03.682Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "0d50d149-e0cb-4033-bf0a-5d561285c60f", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-07-29T21:50:03.750Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "0d50d149-e0cb-4033-bf0a-5d561285c60f", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-07-29T21:50:06.851Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "0d50d149-e0cb-4033-bf0a-5d561285c60f", "error": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-07-29T21:50:31.831Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "d68f961d-3edf-4543-9211-e7bfd6c72780", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in 
ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-07-29T21:50:35.761Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "dbc04334-2abf-4aa1-a741-074619fba693", "user": "proxyadmin"} 2025-07-29T21:50:35.761Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "dbc04334-2abf-4aa1-a741-074619fba693", "user": "proxyadmin"} 2025-07-29T21:50:35.838Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "dbc04334-2abf-4aa1-a741-074619fba693", "user": "proxyadmin"} 2025-07-29T21:50:35.937Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "dbc04334-2abf-4aa1-a741-074619fba693", "user": "proxyadmin"} 2025-07-29T21:50:35.937Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "dbc04334-2abf-4aa1-a741-074619fba693", "last-applied-secret": "6a1349e06262ea44b3f1dd44e902bca45ffe82ef57d5ceff89f80a54ee2c0ac4"} 2025-07-29T21:50:35.941Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "dbc04334-2abf-4aa1-a741-074619fba693", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-07-29T21:50:37.540Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "589c1cdd-fddb-4d42-8372-3fad945d02fd", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:509) : Galera hostgroup retrieval failed. 
\n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:509) : Galera hostgroup retrieval failed. \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-07-29T21:51:18.907Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "b2648e80-b0fb-4518-b352-3f4fbb190d4f"} 2025-07-29T21:51:23.861Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "e0f1893b-52f8-4f54-925e-508c43dff302"} 2025-07-29T21:51:26.326Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "48fd1835-e557-48e2-a207-20f6f80ab14c", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-07-29T21:51:26.489Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "48fd1835-e557-48e2-a207-20f6f80ab14c", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-07-29T21:51:28.351Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "48fd1835-e557-48e2-a207-20f6f80ab14c"} 2025-07-29T21:51:33.611Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "ecb9687c-e965-4a96-bdc6-ff69b7751726", "user": "xtrabackup"} 2025-07-29T21:51:33.638Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "ecb9687c-e965-4a96-bdc6-ff69b7751726", "user": "xtrabackup"} 2025-07-29T21:51:33.668Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "ecb9687c-e965-4a96-bdc6-ff69b7751726", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-07-29T21:51:33.709Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "ecb9687c-e965-4a96-bdc6-ff69b7751726", "user": "xtrabackup"} 2025-07-29T21:51:33.709Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "ecb9687c-e965-4a96-bdc6-ff69b7751726", "last-applied-secret": 
"1df2bef22f75486a15612307b81b909771aa757971d27a9783e8717b1d60cc15"} 2025-07-29T21:51:33.712Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "ecb9687c-e965-4a96-bdc6-ff69b7751726", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-07-29T21:51:36.463Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "ecb9687c-e965-4a96-bdc6-ff69b7751726"} 2025-07-29T21:53:18.813Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "07cc0549-214e-4e1c-b0e9-637a671ae05b", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.11.138.63:33062: connect: connection refused"} 2025-07-29T21:53:24.138Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "0d31022c-4ec1-43ea-826c-6164e2661b91", "primary name": "some-name-pxc-0.some-name-pxc.users-18344.svc.cluster.local"} 2025-07-29T21:53:29.481Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "58e5b4ac-17d0-4dc1-a119-38b7117bf9d0", "primary name": "some-name-pxc-0.some-name-pxc.users-18344.svc.cluster.local"} 2025-07-29T21:53:34.790Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "7ad414ed-720a-44c1-81c5-350e5e9233e7", "primary name": "some-name-pxc-0.some-name-pxc.users-18344.svc.cluster.local"} 2025-07-29T21:53:40.548Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "01284d37-caee-4198-8a72-d238f7f9d9c9", "primary name": "some-name-pxc-0.some-name-pxc.users-18344.svc.cluster.local"} 2025-07-29T21:53:45.722Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "2b790cb9-e411-4393-901b-93c23587975b", "primary name": "some-name-pxc-0.some-name-pxc.users-18344.svc.cluster.local"} 2025-07-29T21:53:51.108Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "1349ab59-ee31-49e1-9c87-29d4f115d896", "primary name": "some-name-pxc-0.some-name-pxc.users-18344.svc.cluster.local"} 2025-07-29T21:53:59.825Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "194f6b45-42ff-489d-ab93-44daca889061"} 2025-07-29T21:54:03.225Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "a4348662-14b1-426d-bd76-4878f33fc3d7", "user": "monitor"} 2025-07-29T21:54:03.251Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "a4348662-14b1-426d-bd76-4878f33fc3d7", "user": "monitor"} 2025-07-29T21:54:03.276Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "a4348662-14b1-426d-bd76-4878f33fc3d7", "secret": "some-name-mysql-init", "user": "monitor"} 2025-07-29T21:54:03.321Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "a4348662-14b1-426d-bd76-4878f33fc3d7", "user": "monitor"} 2025-07-29T21:54:03.348Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "a4348662-14b1-426d-bd76-4878f33fc3d7", "user": "monitor"} 2025-07-29T21:54:03.348Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "a4348662-14b1-426d-bd76-4878f33fc3d7", "last-applied-secret": "e00bdbeda7cf17afbcddc376cd40f782dff2a910b180bbb8f3eb6273a4b6ef4a"} 2025-07-29T21:54:03.351Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "a4348662-14b1-426d-bd76-4878f33fc3d7", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-07-29T21:54:05.572Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "930b3646-fe18-4dd3-b8dd-a478f630cdbd", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-07-29T21:54:52.165Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "d12c4fd5-489f-4f7a-b371-13f8e944b510"} 2025-07-29T21:54:56.606Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "1dcdc705-b8da-46c8-91b0-2b0c86b361e5"} 2025-07-29T21:55:02.486Z DEBUG PXC users 
synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "ff235d6e-5d54-4491-b1d0-d1a955e8d1b1"} 2025-07-29T21:55:08.306Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "1d7cbfca-f5bd-4d1b-b5fd-f78c7c27f1e5"} 2025-07-29T21:55:11.272Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "acfeef6f-5380-41f7-9eaa-a31efbf1f1ff", "user": "operator"} 2025-07-29T21:55:11.328Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "acfeef6f-5380-41f7-9eaa-a31efbf1f1ff", "user": "operator"} 2025-07-29T21:55:11.351Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "acfeef6f-5380-41f7-9eaa-a31efbf1f1ff", "secret": "some-name-mysql-init", "user": "operator"} 2025-07-29T21:55:11.378Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "acfeef6f-5380-41f7-9eaa-a31efbf1f1ff", "user": "operator"} 2025-07-29T21:55:11.378Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "acfeef6f-5380-41f7-9eaa-a31efbf1f1ff", "last-applied-secret": "7c0a6573e470596ef1ac25a09529b21e369e7928d2ff118dc38d510c95c6158e"} 2025-07-29T21:55:11.382Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "acfeef6f-5380-41f7-9eaa-a31efbf1f1ff", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-07-29T21:55:15.077Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "acfeef6f-5380-41f7-9eaa-a31efbf1f1ff", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' 
(using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' 
(using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-07-29T21:55:41.679Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "f82c59f2-6d89-45e4-9cec-0aba01abe5f3"} 2025-07-29T21:55:46.124Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "33b72ab1-e662-4e53-b2f6-40e72b825305"} 2025-07-29T21:55:51.414Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "7d446b8a-b296-4280-9aa8-61b06190f04a"} 2025-07-29T21:55:56.239Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "22d32195-488d-47ac-8528-c4d3bc3af837"} 2025-07-29T21:55:57.165Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "secrets": "my-cluster-secrets-2"} 2025-07-29T21:55:57.165Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "user": "root"} 2025-07-29T21:55:57.205Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "user": "root"} 2025-07-29T21:55:57.227Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "secret": "some-name-mysql-init", "user": "root"} 2025-07-29T21:55:59.660Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d"} 2025-07-29T21:55:59.677Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "user": "root"} 2025-07-29T21:55:59.677Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "user": "operator"} 2025-07-29T21:55:59.703Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "user": "operator"} 2025-07-29T21:55:59.728Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "secret": "some-name-mysql-init", "user": "operator"} 2025-07-29T21:55:59.784Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": 
"410d7532-1e32-4b3a-8e80-eadccccca90d", "user": "operator"} 2025-07-29T21:55:59.784Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "user": "monitor"} 2025-07-29T21:55:59.801Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "user": "monitor"} 2025-07-29T21:55:59.818Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "secret": "some-name-mysql-init", "user": "monitor"} 2025-07-29T21:55:59.849Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "user": "monitor"} 2025-07-29T21:55:59.867Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "user": "monitor"} 2025-07-29T21:55:59.867Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "user": "xtrabackup"} 2025-07-29T21:55:59.881Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "user": "xtrabackup"} 2025-07-29T21:55:59.903Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-07-29T21:55:59.931Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "user": "xtrabackup"} 2025-07-29T21:55:59.931Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "user": "replication"} 2025-07-29T21:55:59.955Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "user": "replication"} 2025-07-29T21:55:59.975Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "secret": "some-name-mysql-init", "user": "replication"} 2025-07-29T21:55:59.994Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "user": "replication"} 2025-07-29T21:55:59.994Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "user": "proxyadmin"} 2025-07-29T21:56:00.027Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "user": "proxyadmin"} 2025-07-29T21:56:00.047Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": 
"410d7532-1e32-4b3a-8e80-eadccccca90d", "user": "proxyadmin"} 2025-07-29T21:56:00.047Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "last-applied-secret": "3d43a8ce65daa2ec7dbae02d57f48d70577500dc8a1ac59512a478c79ff7be05"} 2025-07-29T21:56:00.047Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "last-applied-secret": "3d43a8ce65daa2ec7dbae02d57f48d70577500dc8a1ac59512a478c79ff7be05"} 2025-07-29T21:56:00.050Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-07-29T21:56:00.119Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-07-29T21:56:02.325Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "410d7532-1e32-4b3a-8e80-eadccccca90d", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-07-29T21:57:45.421Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "91b3cf4e-28de-4a8b-bb04-4bdfa7e3a832", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-18344 on 34.118.224.10:53: no such host"} 2025-07-29T21:57:50.419Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "5c24abab-1fb4-4e9d-a93c-d0bc2070c9f9", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-18344 on 34.118.224.10:53: no such host"} 2025-07-29T21:57:55.625Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "4111e0ee-3fdf-4452-a2ba-530b860a0d90", "primary name": "some-name-pxc-0.some-name-pxc.users-18344.svc.cluster.local"} 2025-07-29T21:58:00.827Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "33ed23d8-4448-43e0-8df8-22ee52c78ae3", "primary name": "some-name-pxc-0.some-name-pxc.users-18344.svc.cluster.local"} 2025-07-29T21:58:05.996Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "bc482c8f-b078-48cd-8bed-8b0d7731b8fe", "primary name": "some-name-pxc-0.some-name-pxc.users-18344.svc.cluster.local"} 2025-07-29T21:58:11.228Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "88eb61d6-b58a-4145-8425-c58a0e7bf19e", "primary name": "some-name-pxc-0.some-name-pxc.users-18344.svc.cluster.local"} 2025-07-29T21:58:16.367Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "3a6148f9-9e67-4aa7-a9c6-d5ea4033a7cf", "primary name": "some-name-pxc-0.some-name-pxc.users-18344.svc.cluster.local"} 2025-07-29T21:58:21.749Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "4194ee97-6af0-4f38-ab1a-d80b24656ff3", "primary name": "some-name-pxc-0.some-name-pxc.users-18344.svc.cluster.local"} 2025-07-29T21:58:29.555Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "031cfe65-abce-42e3-a04b-2af536e46eee"} 2025-07-29T21:58:34.335Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "6e8d3d69-5d68-4438-8e62-5cda12625f3b"} 2025-07-29T21:58:35.334Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "03ce9a6b-e73e-4dc5-b828-733d9ceed0b6", "user": "operator"} 2025-07-29T21:58:35.361Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "03ce9a6b-e73e-4dc5-b828-733d9ceed0b6", "user": "operator"} 2025-07-29T21:58:35.378Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "03ce9a6b-e73e-4dc5-b828-733d9ceed0b6", "secret": "some-name-mysql-init", "user": "operator"} 2025-07-29T21:58:35.396Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "03ce9a6b-e73e-4dc5-b828-733d9ceed0b6", "user": "operator"} 2025-07-29T21:58:35.397Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "03ce9a6b-e73e-4dc5-b828-733d9ceed0b6", "last-applied-secret": "4e558d1a98a64a43a9b6240e853cc375aee2dd11404bc1b42bb0bbfa176babb1"} 2025-07-29T21:58:35.400Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "03ce9a6b-e73e-4dc5-b828-733d9ceed0b6", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-07-29T21:58:39.740Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "03ce9a6b-e73e-4dc5-b828-733d9ceed0b6", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' 
(using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' 
(using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18344.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-07-29T21:59:09.282Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "39f5b320-f601-4147-8d34-b338c72a430f"} 2025-07-29T21:59:13.643Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "8dbe56d2-73a0-4efc-9636-e73f95604527"} 2025-07-29T21:59:19.045Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "ee08087f-01a8-459d-b267-18729f8c514c"} 2025-07-29T21:59:24.450Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "5d31206e-ca05-4638-82f9-08282fcf0df1"} 2025-07-29T21:59:29.645Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "d9f3a381-17a2-4b78-9c8d-a20977e27284"} 2025-07-29T21:59:34.982Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "4650a357-ca8e-454c-bdcd-4cbbf0c2fa8c"} 2025-07-29T21:59:40.987Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "195f8160-6bfb-45d6-a24b-70634d403ee1"} 2025-07-29T21:59:45.669Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "a713f3f6-7c85-4184-93e7-4894a142c027"} 2025-07-29T21:59:51.331Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "807ad6af-18e8-4d66-9863-b433f54896b6"} 2025-07-29T21:59:56.452Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "838fa5ab-f83c-4d38-bc9b-d9b0b9beb1ec"} 2025-07-29T22:00:01.583Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "2def5a81-b12e-4de2-a693-1ecb4f761801"} 2025-07-29T22:00:07.470Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", 
"name": "some-name", "reconcileID": "4a12bd85-8bb0-4a39-9ef6-b46047c386e9"} 2025-07-29T22:00:13.163Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "299b4310-1c74-4a18-8417-7c08015a52bc"} 2025-07-29T22:00:18.950Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "ac8d4694-c7d9-4c60-ac10-51baecc9940e"} 2025-07-29T22:00:24.075Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "cd1131d9-4975-4b48-a9da-ee7bd7a7a435"} 2025-07-29T22:00:24.149Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "e237df10-6a62-42cb-a600-0eb8d7085526", "user": "root"} 2025-07-29T22:00:24.173Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "e237df10-6a62-42cb-a600-0eb8d7085526", "user": "root"} 2025-07-29T22:00:24.211Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "e237df10-6a62-42cb-a600-0eb8d7085526", "secret": "some-name-mysql-init", "user": "root"} 2025-07-29T22:00:26.691Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "e237df10-6a62-42cb-a600-0eb8d7085526"} 2025-07-29T22:00:26.713Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "e237df10-6a62-42cb-a600-0eb8d7085526", "user": "root"} 2025-07-29T22:00:26.713Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "e237df10-6a62-42cb-a600-0eb8d7085526", "user": "monitor"} 2025-07-29T22:00:26.748Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "e237df10-6a62-42cb-a600-0eb8d7085526", "user": "monitor"} 2025-07-29T22:00:26.772Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "e237df10-6a62-42cb-a600-0eb8d7085526", "secret": "some-name-mysql-init", "user": "monitor"} 2025-07-29T22:00:26.807Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "e237df10-6a62-42cb-a600-0eb8d7085526", "user": "monitor"} 2025-07-29T22:00:26.832Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "e237df10-6a62-42cb-a600-0eb8d7085526", "user": "monitor"} 2025-07-29T22:00:26.833Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "e237df10-6a62-42cb-a600-0eb8d7085526", "user": "xtrabackup"} 2025-07-29T22:00:26.860Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "e237df10-6a62-42cb-a600-0eb8d7085526", "user": "xtrabackup"} 2025-07-29T22:00:26.887Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "e237df10-6a62-42cb-a600-0eb8d7085526", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-07-29T22:00:26.909Z INFO Internal 
secrets updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "e237df10-6a62-42cb-a600-0eb8d7085526", "user": "xtrabackup"} 2025-07-29T22:00:26.909Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "e237df10-6a62-42cb-a600-0eb8d7085526", "user": "proxyadmin"} 2025-07-29T22:00:26.944Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "e237df10-6a62-42cb-a600-0eb8d7085526", "user": "proxyadmin"} 2025-07-29T22:00:26.964Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "e237df10-6a62-42cb-a600-0eb8d7085526", "user": "proxyadmin"} 2025-07-29T22:00:26.964Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "e237df10-6a62-42cb-a600-0eb8d7085526", "last-applied-secret": "16bb979cde24ab00230c0991431834b9042c2723e7b4de2ae5e801dfa30188da"} 2025-07-29T22:00:26.964Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "e237df10-6a62-42cb-a600-0eb8d7085526", "last-applied-secret": "16bb979cde24ab00230c0991431834b9042c2723e7b4de2ae5e801dfa30188da"} 2025-07-29T22:00:26.968Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "e237df10-6a62-42cb-a600-0eb8d7085526", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-07-29T22:00:27.028Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "e237df10-6a62-42cb-a600-0eb8d7085526", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-07-29T22:00:29.131Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "e237df10-6a62-42cb-a600-0eb8d7085526", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-07-29T22:02:19.935Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "487adc8f-987e-4627-a781-342077bf0d53", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-18344 on 34.118.224.10:53: no such host"} 2025-07-29T22:02:25.140Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "baa732cb-65d8-4e42-864d-804a1a359b65", "primary name": "some-name-pxc-0.some-name-pxc.users-18344.svc.cluster.local"} 2025-07-29T22:02:30.325Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "271a3d4e-c155-4c59-971c-00ac8e5dba69", "primary name": "some-name-pxc-0.some-name-pxc.users-18344.svc.cluster.local"} 2025-07-29T22:02:35.563Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "93eb4a86-c3ca-4ae5-94e7-d8bbb75ae4a4", "primary name": "some-name-pxc-0.some-name-pxc.users-18344.svc.cluster.local"} 2025-07-29T22:02:40.729Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "58ef2a11-3708-4d37-af99-8be3cb8cb7c3", "primary name": "some-name-pxc-0.some-name-pxc.users-18344.svc.cluster.local"} 2025-07-29T22:02:45.887Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "ea34993a-3c37-4da7-8070-80d444fa3e53", "primary name": "some-name-pxc-0.some-name-pxc.users-18344.svc.cluster.local"} 2025-07-29T22:02:51.259Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "e89ba911-f0ce-4439-be55-367386fb013e", "primary name": "some-name-pxc-0.some-name-pxc.users-18344.svc.cluster.local"} 2025-07-29T22:02:57.299Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "3ae02cad-63d5-4152-a292-9b0e1cf13317", "primary name": "some-name-pxc-0.some-name-pxc.users-18344.svc.cluster.local"} 2025-07-29T22:03:00.159Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "ce36501a-1fa7-48bb-a32a-d2fde0619cb4", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-07-29T22:03:00.219Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "ce36501a-1fa7-48bb-a32a-d2fde0619cb4", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-07-29T22:03:00.279Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "ce36501a-1fa7-48bb-a32a-d2fde0619cb4", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-07-29T22:03:00.657Z ERROR Reconciler error {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "ce36501a-1fa7-48bb-a32a-d2fde0619cb4", "error": "HAProxy upgrade error: failed to create or update sts: update error: statefulsets.apps \"some-name-haproxy\" already exists", "errorVerbose": "statefulsets.apps \"some-name-haproxy\" already exists\nupdate 
error\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:158\nk8s.io/client-go/util/retry.OnError.func1\n\t/go/pkg/mod/k8s.io/client-go@v0.33.3/util/retry/util.go:51\nk8s.io/apimachinery/pkg/util/wait.runConditionWithCrashProtection\n\t/go/pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/wait.go:150\nk8s.io/apimachinery/pkg/util/wait.ExponentialBackoff\n\t/go/pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:477\nk8s.io/client-go/util/retry.OnError\n\t/go/pkg/mod/k8s.io/client-go@v0.33.3/util/retry/util.go:50\nk8s.io/client-go/util/retry.RetryOnConflict\n\t/go/pkg/mod/k8s.io/client-go@v0.33.3/util/retry/util.go:104\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:116\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileHAProxy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:531\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:415\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:119\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:340\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:202\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nfailed to create or update 
sts\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:166\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileHAProxy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:531\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:415\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:119\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:340\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:202\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nHAProxy upgrade error\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileHAProxy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:532\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:415\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:119\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:340\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:300\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:202\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-07-29T22:03:00.887Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "7424e6e2-3d92-4fad-b903-4366730b24e4", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-07-29T22:03:00.936Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "7424e6e2-3d92-4fad-b903-4366730b24e4", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-07-29T22:03:01.034Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "7424e6e2-3d92-4fad-b903-4366730b24e4", "object": "some-name-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 
2025-07-29T22:03:04.504Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "7424e6e2-3d92-4fad-b903-4366730b24e4", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.231.125:3306: connect: connection refused"} 2025-07-29T22:03:07.829Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "39d378a1-4823-4793-9085-63b43dd7ee22", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.231.125:3306: connect: connection refused"} 2025-07-29T22:05:37.144Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "8a55fa8a-43f5-46b5-a22a-4bc3463e4790", "user": "root"} 2025-07-29T22:05:37.181Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "8a55fa8a-43f5-46b5-a22a-4bc3463e4790", "user": "root"} 2025-07-29T22:05:37.207Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "8a55fa8a-43f5-46b5-a22a-4bc3463e4790", "secret": "some-name-mysql-init", "user": "root"} 2025-07-29T22:05:37.235Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "8a55fa8a-43f5-46b5-a22a-4bc3463e4790", "user": "root"} 2025-07-29T22:05:37.235Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "8a55fa8a-43f5-46b5-a22a-4bc3463e4790", "user": "operator"} 2025-07-29T22:05:37.249Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "8a55fa8a-43f5-46b5-a22a-4bc3463e4790", "user": "operator"} 2025-07-29T22:05:37.268Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "8a55fa8a-43f5-46b5-a22a-4bc3463e4790", "secret": "some-name-mysql-init", "user": "operator"} 2025-07-29T22:05:37.300Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "8a55fa8a-43f5-46b5-a22a-4bc3463e4790", "user": "operator"} 2025-07-29T22:05:37.300Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "8a55fa8a-43f5-46b5-a22a-4bc3463e4790", "user": "monitor"} 2025-07-29T22:05:37.312Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "8a55fa8a-43f5-46b5-a22a-4bc3463e4790", "user": "monitor"} 2025-07-29T22:05:37.344Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "8a55fa8a-43f5-46b5-a22a-4bc3463e4790", "secret": "some-name-mysql-init", "user": "monitor"} 2025-07-29T22:05:37.375Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "8a55fa8a-43f5-46b5-a22a-4bc3463e4790", "user": "monitor"} 2025-07-29T22:05:37.375Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "8a55fa8a-43f5-46b5-a22a-4bc3463e4790", "user": "xtrabackup"} 2025-07-29T22:05:37.397Z INFO User password updated 
{"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "8a55fa8a-43f5-46b5-a22a-4bc3463e4790", "user": "xtrabackup"} 2025-07-29T22:05:37.418Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "8a55fa8a-43f5-46b5-a22a-4bc3463e4790", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-07-29T22:05:37.440Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "8a55fa8a-43f5-46b5-a22a-4bc3463e4790", "user": "xtrabackup"} 2025-07-29T22:05:37.440Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "8a55fa8a-43f5-46b5-a22a-4bc3463e4790", "user": "replication"} 2025-07-29T22:05:37.465Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "8a55fa8a-43f5-46b5-a22a-4bc3463e4790", "user": "replication"} 2025-07-29T22:05:37.485Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "8a55fa8a-43f5-46b5-a22a-4bc3463e4790", "secret": "some-name-mysql-init", "user": "replication"} 2025-07-29T22:05:37.506Z INFO HAProxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "8a55fa8a-43f5-46b5-a22a-4bc3463e4790", "last-applied-secret": "7c0a6573e470596ef1ac25a09529b21e369e7928d2ff118dc38d510c95c6158e"} 2025-07-29T22:05:37.506Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "8a55fa8a-43f5-46b5-a22a-4bc3463e4790", "user": "replication"} 2025-07-29T22:05:37.506Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "8a55fa8a-43f5-46b5-a22a-4bc3463e4790", "last-applied-secret": "7c0a6573e470596ef1ac25a09529b21e369e7928d2ff118dc38d510c95c6158e"} 2025-07-29T22:05:37.508Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "8a55fa8a-43f5-46b5-a22a-4bc3463e4790", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-07-29T22:05:37.572Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "8a55fa8a-43f5-46b5-a22a-4bc3463e4790", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-07-29T22:07:14.201Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "f9d2c02c-c734-4606-a226-89ade373963e", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-18344 on 34.118.224.10:53: no such host"} 2025-07-29T22:07:21.324Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "7944eb5b-064d-4465-9795-652e29520fd3", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-18344 on 34.118.224.10:53: no such host"} 2025-07-29T22:08:16.034Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", 
"reconcileID": "c360f36f-15d2-41ef-94ae-5c841b572ae9", "user": "monitor"} 2025-07-29T22:08:16.058Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "c360f36f-15d2-41ef-94ae-5c841b572ae9", "user": "monitor"} 2025-07-29T22:08:16.086Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "c360f36f-15d2-41ef-94ae-5c841b572ae9", "secret": "some-name-mysql-init", "user": "monitor"} 2025-07-29T22:08:16.109Z INFO HAProxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "c360f36f-15d2-41ef-94ae-5c841b572ae9", "last-applied-secret": "98686bd59c8bec08884a5af3863ba7d5114950aec0b14989eaa510612a86c85c"} 2025-07-29T22:08:16.109Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "c360f36f-15d2-41ef-94ae-5c841b572ae9", "user": "monitor"} 2025-07-29T22:08:16.112Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-18344", "name": "some-name", "reconcileID": "c360f36f-15d2-41ef-94ae-5c841b572ae9", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} ... // 22 identical fields ... // 2 identical fields ... // 2 identical fields - "3a8c", - "3d", + "3d43a8ce65daa2ec7dbae02d57f48d70577500dc8a1ac59512a478c79ff7be0", - "3d43a8ce65daa2ec7dbae02d57f48d70577500dc8a1ac59512a478c79ff7be05", + "3d43a8ce65daa2ec7dbae02d57f48d70577500dc8a1ac59512a478c79ff7be05", ... // 3 identical elements ... // 3 identical fields ... // 3 identical fields ... // 3 identical fields "4", - "4e558d1a98a64a43a9b6240e853cc375aee2dd11404bc1b42bb0bbfa176babb1", ... // 4 identical fields "5", + "558d1a98a64a43a9b6240e853cc375aee2dd11404bc1b42bb0bbfa176babb1", ... // 5 identical fields ... // 5 identical fields - "65daa2ec7dbae02d57f48d70577500dc8a1ac59512a478c79ff7be05", - "6a1349e06262ea44b3f1dd44e902bca45ffe82ef57d5ceff89f80a54ee2c0ac4", ... // 6 identical fields - "7c0a6573e470596ef1ac25a09529b21e369e7928d2ff118dc38d510c95c6158e", + "7c0a6573e470596ef1ac25a09529b21e369e7928d2ff118dc38d510c95c6158e", ... // 7 identical fields ... // 7 identical fields + "98686bd59c8bec08884a5af3863ba7d5114950aec0b14989eaa510612a86c85c", ... // 9 identical fields ... 
// 9 identical fields AccessModes: nil, ActiveDeadlineSeconds: nil, Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "haproxy", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}}, Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}}, Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}}, Annotations: map[string]string{ - Annotations: map[string]string{ + Annotations: map[string]string{ + APIVersion: "", - APIVersion: "apps/v1", - APIVersion: "apps/v1", - APIVersion: "v1", Args: {"haproxy"}, Args: {"mysqld"}, Args: {"proxysql", "-f", "-c", "/etc/proxysql/proxysql.cnf", ...}, - Args: []string{"logrotate"}, AutomountServiceAccountToken: nil, + AvailableReplicas: 0, - AvailableReplicas: 2, - AvailableReplicas: 3, AWSElasticBlockStore: nil, AzureFile: nil, Capacity: nil, - CollisionCount: &0, + CollisionCount: nil, Conditions: nil, ConfigMapKeyRef: nil, ConfigMap: &v1.ConfigMapVolumeSource{ ContainerPort: 3306, ContainerPort: 33060, ContainerPort: 33062, ContainerPort: 3307, ContainerPort: 3309, ContainerPort: 4444, ContainerPort: 4567, ContainerPort: 4568, ContainerPort: 6032, ContainerPort: 6070, ContainerPort: 8404, Containers: []v1.Container{ + CreationTimestamp: v1.Time{}, - CreationTimestamp: v1.Time{Time: s"2025-07-29 21:44:09 +0000 UTC"}, - CreationTimestamp: v1.Time{Time: s"2025-07-29 22:03:00 +0000 UTC"}, + CurrentReplicas: 0, - CurrentReplicas: 2, - CurrentReplicas: 3, + CurrentRevision: "", - CurrentRevision: "some-name-haproxy-66bdc85484", - CurrentRevision: "some-name-haproxy-6dbc4549bd", - CurrentRevision: "some-name-proxysql-5959947457", - CurrentRevision: "some-name-proxysql-5b99c95b49", - CurrentRevision: "some-name-proxysql-76f6f898ff", - CurrentRevision: "some-name-proxysql-847dc9c7dc", - CurrentRevision: "some-name-proxysql-857b5d78bc", - CurrentRevision: "some-name-proxysql-c6dd648f5", - CurrentRevision: "some-name-pxc-5ff9f7f6b8", - CurrentRevision: "some-name-pxc-65f9b54c96", - CurrentRevision: "some-name-pxc-66c957b685", - CurrentRevision: "some-name-pxc-6f98b776f7", - CurrentRevision: "some-name-pxc-7598d6dcc7", DataSource: nil, DataSourceRef: nil, - DefaultMode: &420, - DefaultMode: &420, + DefaultMode: nil, + DefaultMode: nil, DeletionGracePeriodSeconds: nil, DeletionGracePeriodSeconds: nil, DeletionTimestamp: nil, + DeprecatedServiceAccount: "", - DeprecatedServiceAccount: "default", + DNSPolicy: "", - DNSPolicy: "ClusterFirst", "e", - "e00bdbeda7cf17afbcddc376cd40f782dff2a910b180bbb8f3eb6273a4b6ef4a", + "e00bdbeda7cf17afbcddc376cd40f782dff2a910b180bbb8f3eb6273a4b6ef4a", EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-haproxy"}, Optional: &true}}}, EnvFrom: {{SecretRef: 
&{LocalObjectReference: {Name: "some-name-env-vars-proxysql"}, Optional: &true}}}, EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-pxc"}, Optional: &true}}}, EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-log-collector"}, Optional: &true}}}, - EnvFrom: []v1.EnvFromSource{{SecretRef: s"&SecretEnvSource{LocalObjectRefe"...}}, Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "LIVENESS_CHECK_TIMEOUT", Value: "5"}, {Name: "READINESS_CHECK_TIMEOUT", Value: "1"}}, Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "OPERATOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "proxyadmin"}}}, ...}, Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, ...}, Env: []v1.EnvVar{ - Env: []v1.EnvVar{ EphemeralContainers: nil, FailureThreshold: 3, FC: nil, FieldPath: "metadata.name", FieldPath: "metadata.namespace", FieldRef: &v1.ObjectFieldSelector{ - FieldsType: "FieldsV1", - FieldsType: "FieldsV1", - FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`..., - FieldsV1: s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`..., Finalizers: nil, Finalizers: nil, + Generation: 0, - Generation: 1, - Generation: 2, - Generation: 3, - Generation: 4, - Generation: 5, - Generation: 6, - Generation: 7, - Generation: 8, github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 GitRepo: nil, /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:202 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:300 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/internal/controller/controller.go:353 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:869 HostAliases: nil, HostAliases: nil, HostIP: "", HostIPC: false, Hostname: "", HostPort: 0, - Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", - Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", ImagePullPolicy: "Always", - ImagePullPolicy: "Always", ImagePullSecrets: nil, InitContainers: []v1.Container{ InitialDelaySeconds: 300, ISCSI: nil, Items: nil, Items: nil, "kubectl.kubernetes.io/default-container": "haproxy", "kubectl.kubernetes.io/default-container": "proxysql", "kubectl.kubernetes.io/default-container": "pxc", Labels: {"app.kubernetes.io/component": "haproxy", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", 
"app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Labels: nil, + "last-applied-secret": "1df2bef22f75486a15612307b81b909771aa757971d27a9783e8717b1d60cc15", + "last-applied-secret": "6a1349e06262ea44b3f1dd44e902bca45ffe82ef57d5ceff89f80a54ee2c0ac4", + "last-applied-secret": "7c0a6573e470596ef1ac25a09529b21e369e7928d2ff118dc38d510c95c6158e", "last-applied-secret": strings.Join({ Lifecycle: nil, LivenessProbe: &v1.Probe{ LocalObjectReference: {Name: "auto-some-name-pxc"}, LocalObjectReference: {Name: "some-name-haproxy"}, LocalObjectReference: {Name: "some-name-pxc"}, ManagedFields: nil, + ManagedFields: nil, - ManagedFields: []v1.ManagedFieldsEntry{ - Manager: "kube-controller-manager", - Manager: "percona-xtradb-cluster-operator", MinReadySeconds: 0, [mysql] 2025/07/29 22:07:40 packets.go:58 unexpected EOF Name: "auto-config", {Name: "bin", VolumeSource: {EmptyDir: &{}}}, {Name: "CLUSTER_HASH", Value: "4102360"}, Name: "config", Name: "DEFAULT_AUTHENTICATION_PLUGIN", {Name: "haproxy-auto", VolumeSource: {EmptyDir: &{}}}, Name: "haproxy-custom", - {Name: "IS_LOGCOLLECTOR", Value: "yes"}, Name: "ist", {Name: "LIVENESS_CHECK_TIMEOUT", Value: "5"}, {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"}, - {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"}, - Name: "logrotate", - Name: "logs", {Name: "MONITOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "monitor"}}}, - {Name: "MONITOR_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, Name: "mysql", Name: "mysql-admin", Name: "mysql-init-file", {Name: "MYSQL_NOTIFY_SOCKET", Value: "/var/lib/mysql/notify.sock"}, Name: "mysql-replicas", {Name: "MYSQL_STATE_FILE", Value: "/var/lib/mysql/mysql.state"}, Name: "mysql-users-secret-file", Name: "mysqlx", {Name: "OPERATOR_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, Name: "POD_NAME", Name: "POD_NAMESPASE", - {Name: "POD_NAMESPASE", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, - {Name: "POD_NAME", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, Name: "proxyadm", Name: "proxy-protocol", {Name: "READINESS_CHECK_TIMEOUT", Value: "15"}, - {Name: "SERVICE_TYPE", Value: "mysql"}, Name: "some-name-env-vars-haproxy", Namespace: "users-18344", Name: "ssl", Name: "ssl-internal", Name: "sst", Name: "stats", {Name: "tmp", VolumeSource: {EmptyDir: &{}}}, Name: "vault-keyring-secret", Name: "write-set", {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, NFS: nil, NodeName: "", NodeSelector: nil, ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "last-applied-secret": "6a1349e06262ea44b3f1dd44e902bca45ffe82ef57d5ceff89f80a54ee2c0ac4", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}}, ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", 
"percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}}, ObjectMeta: {Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "pxc", "last-applied-secret": "16bb979cde24ab00230c0991431834b9042c2723e7b4de2ae5e801dfa30188da", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}}, ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, ObjectMeta: {Name: "proxydata", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, ObjectMeta: v1.ObjectMeta{ ObjectMeta: v1.ObjectMeta{ + ObservedGeneration: 0, - ObservedGeneration: 1, - ObservedGeneration: 2, - ObservedGeneration: 3, - ObservedGeneration: 4, - ObservedGeneration: 5, - ObservedGeneration: 6, - ObservedGeneration: 7, - ObservedGeneration: 8, - Operation: "Update", - Operation: "Update", Optional: &false, Optional: &true, Optional: &true, Ordinals: nil, OS: nil, Overhead: nil, OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name", UID: "c2740c3e-8d91-4b13-9be3-46e5df04ba3b", ...}}, OwnerReferences: nil, "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsiY3JlYXRpb25UaW1lc3RhbXAiOm51bGwsImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJwcm94eXNxbCIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6InByb3h5c3FsIiwibGFzdC1hcHBsaWVkLXNlY3JldCI6IjdjMGE2NTczZTQ3MDU5NmVmMWFjMjVhMDk1MjliMjFlMzY5ZTc5MjhkMmZmMTE4ZGMzOGQ1MTBjOTVjNjE1OGUiLCJwZXJjb25h"..., + 
"percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsiY3JlYXRpb25UaW1lc3RhbXAiOm51bGwsImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJwcm94eXNxbCIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6InByb3h5c3FsIiwibGFzdC1hcHBsaWVkLXNlY3JldCI6IjdjMGE2NTczZTQ3MDU5NmVmMWFjMjVhMDk1MjliMjFlMzY5ZTc5MjhkMmZmMTE4ZGMzOGQ1MTBjOTVjNjE1OGUiLCJwZXJjb25h"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsiY3JlYXRpb25UaW1lc3RhbXAiOm51bGwsImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJwcm94eXNxbCIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6InByb3h5c3FsIiwibGFzdC1hcHBsaWVkLXNlY3JldCI6IjE2YmI5NzljZGUyNGFiMDAyMzBjMDk5MTQzMTgzNGI5MDQyYzI3MjNlN2I0ZGUyYWU1ZTgwMWRmYTMwMTg4ZGEiLCJwZXJjb25h"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsiY3JlYXRpb25UaW1lc3RhbXAiOm51bGwsImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJwcm94eXNxbCIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6InByb3h5c3FsIiwibGFzdC1hcHBsaWVkLXNlY3JldCI6IjNkNDNhOGNlNjVkYWEyZWM3ZGJhZTAyZDU3ZjQ4ZDcwNTc3NTAwZGM4YTFhYzU5NTEyYTQ3OGM3OWZmN2JlMDUiLCJwZXJjb25h"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsiY3JlYXRpb25UaW1lc3RhbXAiOm51bGwsImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJwcm94eXNxbCIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6InByb3h5c3FsIiwibGFzdC1hcHBsaWVkLXNlY3JldCI6IjNkNDNhOGNlNjVkYWEyZWM3ZGJhZTAyZDU3ZjQ4ZDcwNTc3NTAwZGM4YTFhYzU5NTEyYTQ3OGM3OWZmN2JlMDUiLCJwZXJjb25h"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsiY3JlYXRpb25UaW1lc3RhbXAiOm51bGwsImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJwcm94eXNxbCIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6InByb3h5c3FsIiwibGFzdC1hcHBsaWVkLXNlY3JldCI6IjRlNTU4ZDFhOThhNjRhNDNhOWI2MjQwZTg1M2NjMzc1YWVlMmRkMTE0MDRiYzFiNDJiYjBiYmZhMTc2YmFiYjEiLCJwZXJjb25h"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsiY3JlYXRpb25UaW1lc3RhbXAiOm51bGwsImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJwcm94eXNxbCIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6InByb3h5c3FsIiwibGFzdC1hcHBsaWVkLXNlY3JldCI6IjRlNTU4ZDFhOThhNjRhNDNhOWI2MjQwZTg1M2NjMzc1YWVlMmRkMTE0MDRiYzFiNDJiYjBiYmZhMTc2YmFiYjEiLCJwZXJjb25h"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsiY3JlYXRpb25UaW1lc3RhbXAiOm51bGwsImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJwcm94eXNxbCIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6InByb3h5c3FsIiwibGFzdC1hcHBsaWVkLXNlY3JldCI6IjZhMTM0OWUwNjI2MmVhNDRiM2YxZGQ0NGU5MDJiY2E0NWZmZTgyZWY1N2Q1Y2VmZjg5ZjgwYTU0ZWUyYzBhYzQiLCJwZXJjb25h"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsiY3JlYXRpb25UaW1lc3RhbXAiOm51bGwsImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJwcm94eXNxbCIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6InByb3h5c3FsIiwibGFzdC1hcHBsaWVkLXNlY3JldCI6ImUwMGJkYmVkYTdjZjE3YWZiY2RkYzM3NmNkNDBmNzgyZGZmMmE5MTBiMTgwYmJiOGYzZWI2MjczYTRiNmVmNGEiLCJwZXJjb25h"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsiY3JlYXRpb25UaW1lc3RhbXAiOm51bGwsImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJwcm94eXNxbCIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6InByb3h5c3FsIiwibGFzdC1hcHBsaWVkLXNlY3JldCI6ImUwMGJkYmVkYTdjZjE3YWZiY2RkYzM3NmNkNDBmNzgyZGZmMmE5MTBiMTgwYmJiOGYzZWI2MjczYTRiNmVmNGEiLCJwZXJjb25h"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJjcmVhdGlvblRpbWVzdGFtcCI6bnVsbCwibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6ImhhcHJveHkiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJoYXByb3h5IiwibGFzdC1hcHBsaWVkLXNlY3JldCI6IjdjMGE2NTczZTQ3MDU5NmVmMWFjMjVhMDk1MjliMjFlMzY5ZTc5MjhkMmZmMTE4ZGMzOGQ1MTBjOTVjNjE1OGUiLCJwZXJjb25hLmNv"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJjcmVhdGlvblRpbWVzdGFtcCI6bnVsbCwibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6ImhhcHJveHkiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJoYXByb3h5IiwibGFzdC1hcHBsaWVkLXNlY3JldCI6IjdjMGE2NTczZTQ3MDU5NmVmMWFjMjVhMDk1MjliMjFlMzY5ZTc5MjhkMmZmMTE4ZGMzOGQ1MTBjOTVjNjE1OGUiLCJwZXJjb25hLmNv"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJjcmVhdGlvblRpbWVzdGFtcCI6bnVsbCwibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6ImhhcHJveHkiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJoYXByb3h5IiwibGFzdC1hcHBsaWVkLXNlY3JldCI6Ijk4Njg2YmQ1OWM4YmVjMDg4ODRhNWFmMzg2M2JhN2Q1MTE0OTUwYWVjMGIxNDk4OWVhYTUxMDYxMmE4NmM4NWMiLCJwZXJjb25hLmNv"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJjcmVhdGlvblRpbWVzdGFtcCI6bnVsbCwibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6ImhhcHJveHkiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJoYXByb3h5IiwicGVyY29uYS5jb20vY29uZmlndXJhdGlvbi1oYXNoIjoiZDQxZDhjZDk4ZjAwYjIwNGU5ODAwOTk4ZWNmODQyN2UifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6Imhh"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImNyZWF0aW9uVGltZXN0YW1wIjpudWxsLCJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHhjIiwibGFzdC1hcHBsaWVkLXNlY3JldCI6IjdjMGE2NTczZTQ3MDU5NmVmMWFjMjVhMDk1MjliMjFlMzY5ZTc5MjhkMmZmMTE4ZGMzOGQ1MTBjOTVjNjE1OGUiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImNyZWF0aW9uVGltZXN0YW1wIjpudWxsLCJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHhjIiwibGFzdC1hcHBsaWVkLXNlY3JldCI6IjE2YmI5NzljZGUyNGFiMDAyMzBjMDk5MTQzMTgzNGI5MDQyYzI3MjNlN2I0ZGUyYWU1ZTgwMWRmYTMwMTg4ZGEiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImNyZWF0aW9uVGltZXN0YW1wIjpudWxsLCJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHhjIiwibGFzdC1hcHBsaWVkLXNlY3JldCI6IjE2YmI5NzljZGUyNGFiMDAyMzBjMDk5MTQzMTgzNGI5MDQyYzI3MjNlN2I0ZGUyYWU1ZTgwMWRmYTMwMTg4ZGEiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImNyZWF0aW9uVGltZXN0YW1wIjpudWxsLCJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHhjIiwibGFzdC1hcHBsaWVkLXNlY3JldCI6IjE2YmI5NzljZGUyNGFiMDAyMzBjMDk5MTQzMTgzNGI5MDQyYzI3MjNlN2I0ZGUyYWU1ZTgwMWRmYTMwMTg4ZGEiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNvbS9zc2wtaW50ZXJuYWwtaGFzaCI6Ijc1MWU1OWExYzFkOTRhYzY3ZjlkNmJiNmU3NTZhY2RjIn19LCJzcGVjIjp7InZvbHVtZXMiOlt7Im5hbWUiOiJ0bXAiLCJlbXB0eURpciI6e319LHsibmFtZSI6ImNvbmZpZyIsImNvbmZpZ01hcCI6eyJuYW1lIjoic29tZS1uYW1lLXB4YyIsIm9wdGlvbmFsIjp0cnVlfX0seyJuYW1lIjoic3NsLWludGVybmFsIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtc3NsLWludGVybmFsIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJhdXRvLWNvbmZpZyIsImNvbmZpZ01hcCI6eyJuYW1lIjoiYXV0by1zb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJ2YXVsdC1rZXlyaW5nLXNlY3JldCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXZhdWx0Iiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJteXNxbC11c2Vycy1zZWNyZXQtZmlsZSIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwib3B0aW9uYWwiOmZhbHNlfX0seyJuYW1lIjoibXlzcWwtaW5pdC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtbXlzcWwtaW5pdCIsIm9wdGlvbmFsIjp0cnVlfX1dLCJpbml0Q29udGFpbmVycyI6W3sibmFtZSI6InB4Yy1pbml0IiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6UFItMjAzNi1jNDJjMWM2YyIsImNvbW1hbmQiOlsiL3B4Yy1pbml0LWVudHJ5cG9pbnQuc2giXSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI1MG0iLCJtZW1vcnkiOiI1ME0ifX0sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImRhdGFkaXIiLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzI
n1dLCJjb250YWluZXJzIjpbeyJuYW1lIjoibG9ncyIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOm1haW4tbG9nY29sbGVjdG9yIiwiZW52RnJvbSI6W3sic2VjcmV0UmVmIjp7Im5hbWUiOiJzb21lLW5hbWUtbG9nLWNvbGxlY3RvciIsIm9wdGlvbmFsIjp0cnVlfX1dLCJlbnYiOlt7Im5hbWUiOiJMT0dfREFUQV9ESVIiLCJ2YWx1ZSI6Ii92YXIvbGliL215c3FsIn0seyJuYW1lIjoiUE9EX05BTUVTUEFTRSIsInZhbHVlRnJvbSI6eyJmaWVsZFJlZiI6eyJmaWVsZFBhdGgiOiJtZXRhZGF0YS5uYW1lc3BhY2UifX19LHsibmFtZSI6IlBPRF9OQU1FIiwidmFsdWVGcm9tIjp7ImZpZWxkUmVmIjp7ImZpZWxkUGF0aCI6Im1ldGFkYXRhLm5hbWUifX19XSwicmVzb3VyY2VzIjp7fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifSx7Im5hbWUiOiJsb2dyb3RhdGUiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLWxvZ2NvbGxlY3RvciIsImFyZ3MiOlsibG9ncm90YXRlIl0sImVudiI6W3sibmFtZSI6IlNFUlZJQ0VfVFlQRSIsInZhbHVlIjoibXlzcWwifSx7Im5hbWUiOiJNT05JVE9SX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoibW9uaXRvciJ9fX1dLCJyZXNvdXJjZXMiOnt9LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJkYXRhZGlyIiwibW91bnRQYXRoIjoiL3Zhci9saWIvbXlzcWwifV0sImltYWdlUHVsbFBvbGljeSI6IkFsd2F5cyJ9LHsibmFtZSI6InB4YyIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOm1haW4tcHhjNS43IiwiY29tbWFuZCI6WyIvdmFyL2xpYi9teXNxbC9weGMtZW50cnlwb2ludC5zaCJdLCJhcmdzIjpbIm15c3FsZCJdLCJwb3J0cyI6W3sibmFtZSI6Im15c3FsIiwiY29udGFpbmVyUG9ydCI6MzMwNn0seyJuYW1lIjoic3N0IiwiY29udGFpbmVyUG9ydCI6NDQ0NH0seyJuYW1lIjoid3JpdGUtc2V0IiwiY29udGFpbmVyUG9ydCI6NDU2N30seyJuYW1lIjoiaXN0IiwiY29udGFpbmVyUG9ydCI6NDU2OH0seyJuYW1lIjoibXlzcWwtYWRtaW4iLCJj"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImNyZWF0aW9uVGltZXN0YW1wIjpudWxsLCJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHhjIiwibGFzdC1hcHBsaWVkLXNlY3JldCI6IjE2YmI5NzljZGUyNGFiMDAyMzBjMDk5MTQzMTgzNGI5MDQyYzI3MjNlN2I0ZGUyYWU1ZTgwMWRmYTMwMTg4ZGEiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNvbS9zc2wtaW50ZXJuYWwtaGFzaCI6Ijc1MWU1OWExYzFkOTRhYzY3ZjlkNmJiNmU3NTZhY2RjIn19LCJzcGVjIjp7InZvbHVtZXMiOlt7Im5hbWUiOiJ0bXAiLCJlbXB0eURpciI6e319LHsibmFtZSI6ImNvbmZpZyIsImNvbmZpZ01hcCI6eyJuYW1lIjoic29tZS1uYW1lLXB4YyIsIm9wdGlvbmFsIjp0cnVlfX0seyJuYW1lIjoic3NsLWludGVybmFsIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtc3NsLWludGVybmFsIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJhdXRvLWNvbmZpZyIsImNvbmZpZ01hcCI6eyJuYW1lIjoiYXV0by1zb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJ2YXVsdC1rZXlyaW5nLXNlY3JldCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXZhdWx0Iiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJteXNxbC11c2Vycy1zZWNyZXQtZmlsZSIsInNlY3J
ldCI6eyJzZWNyZXROYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwib3B0aW9uYWwiOmZhbHNlfX0seyJuYW1lIjoibXlzcWwtaW5pdC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtbXlzcWwtaW5pdCIsIm9wdGlvbmFsIjp0cnVlfX1dLCJpbml0Q29udGFpbmVycyI6W3sibmFtZSI6InB4Yy1pbml0IiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6UFItMjAzNi1jNDJjMWM2YyIsImNvbW1hbmQiOlsiL3B4Yy1pbml0LWVudHJ5cG9pbnQuc2giXSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI1MG0iLCJtZW1vcnkiOiI1ME0ifX0sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImRhdGFkaXIiLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn1dLCJjb250YWluZXJzIjpbeyJuYW1lIjoicHhjIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1weGM1LjciLCJjb21tYW5kIjpbIi92YXIvbGliL215c3FsL3B4Yy1lbnRyeXBvaW50LnNoIl0sImFyZ3MiOlsibXlzcWxkIl0sInBvcnRzIjpbeyJuYW1lIjoibXlzcWwiLCJjb250YWluZXJQb3J0IjozMzA2fSx7Im5hbWUiOiJzc3QiLCJjb250YWluZXJQb3J0Ijo0NDQ0fSx7Im5hbWUiOiJ3cml0ZS1zZXQiLCJjb250YWluZXJQb3J0Ijo0NTY3fSx7Im5hbWUiOiJpc3QiLCJjb250YWluZXJQb3J0Ijo0NTY4fSx7Im5hbWUiOiJteXNxbC1hZG1pbiIsImNvbnRhaW5lclBvcnQiOjMzMDYyfSx7Im5hbWUiOiJteXNxbHgiLCJjb250YWluZXJQb3J0IjozMzA2MH1dLCJlbnZGcm9tIjpbeyJzZWNyZXRSZWYiOnsibmFtZSI6InNvbWUtbmFtZS1lbnYtdmFycy1weGMiLCJvcHRpb25hbCI6dHJ1ZX19XSwiZW52IjpbeyJuYW1lIjoiUFhDX1NFUlZJQ0UiLCJ2YWx1ZSI6InNvbWUtbmFtZS1weGMtdW5yZWFkeSJ9LHsibmFtZSI6Ik1PTklUT1JfSE9TVCIsInZhbHVlIjoiJSJ9LHsibmFtZSI6Ik1ZU1FMX1JPT1RfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJyb290In19fSx7Im5hbWUiOiJYVFJBQkFDS1VQX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoieHRyYWJhY2t1cCJ9fX0seyJuYW1lIjoiTU9OSVRPUl9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6Im1vbml0b3IifX19LHsibmFtZSI6IkNMVVNURVJfSEFTSCIsInZhbHVlIjoiNDEwMjM2MCJ9LHsibmFtZSI6Ik9QRVJBVE9SX0FETUlOX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5Ijoib3BlcmF0b3IifX19LHsibmFtZSI6IkxJVkVORVNTX0NIRUNLX1RJTUVPVVQiLCJ2YWx1ZSI6IjUifSx7Im5hbWUiOiJSRUFESU5FU1NfQ0hFQ0tfVElNRU9VVCIsInZhbHVlIjoiMTUifSx7Im5hbWUiOiJERUZBVUxUX0FVVEhFTlRJQ0FUSU9OX1BMVUdJTiIsInZhbHVlIjoiY2FjaGluZ19zaGEy"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImNyZWF0aW9uVGltZXN0YW1wIjpudWxsLCJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHhjIiwibGFzdC1hcHBsaWVkLXNlY3JldCI6IjFkZjJiZWYyMmY3NTQ4NmExNTYxMjMwN2I4MWI5MDk3NzFhYTc1Nzk3MWQyN2E5NzgzZTg3MTdiMWQ2MGNjMTUiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImNyZWF0aW9uVGltZXN0YW1wIjpudWxsLCJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHhjIiwibGFzdC1hcHBsaWVkLXNlY3JldCI6IjFkZjJiZWYyMmY3NTQ4NmExNTYxMjMwN2I4MWI5MDk3NzFhYTc1Nzk3MWQyN2E5NzgzZTg3MTdiMWQ2MGNjMTUiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImNyZWF0aW9uVGltZXN0YW1wIjpudWxsLCJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHhjIiwibGFzdC1hcHBsaWVkLXNlY3JldCI6IjNkNDNhOGNlNjVkYWEyZWM3ZGJhZTAyZDU3ZjQ4ZDcwNTc3NTAwZGM4YTFhYzU5NTEyYTQ3OGM3OWZmN2JlMDUiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImNyZWF0aW9uVGltZXN0YW1wIjpudWxsLCJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHhjIiwibGFzdC1hcHBsaWVkLXNlY3JldCI6IjNkNDNhOGNlNjVkYWEyZWM3ZGJhZTAyZDU3ZjQ4ZDcwNTc3NTAwZGM4YTFhYzU5NTEyYTQ3OGM3OWZmN2JlMDUiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImNyZWF0aW9uVGltZXN0YW1wIjpudWxsLCJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHhjIiwicGVyY29uYS5jb20vY29uZmlndXJhdGlvbi1oYXNoIjoiZDQxZDhjZDk4ZjAwYjIwNGU5ODAwOTk4ZWNmODQyN2UiLCJwZXJjb25hLmNvbS9zc2wtaGFzaCI6ImNmZDZhNTIzOTgyNjgxNzNiNTFk"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsiY3JlYXRpb25UaW1lc3RhbXAiOm51bGwsImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJwcm94eXNxbCIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6InByb3h5c3FsIiwibGFzdC1hcHBsaWVkLXNlY3JldCI6IjZhMTM0OWUwNjI2MmVhNDRiM2YxZGQ0NGU5MDJiY2E0NWZmZTgyZWY1N2Q1Y2VmZjg5ZjgwYTU0ZWUyYzBhYzQiLCJwZXJjb25h"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsiY3JlYXRpb25UaW1lc3RhbXAiOm51bGwsImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJwcm94eXNxbCIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6InByb3h5c3FsIiwicGVyY29uYS5jb20vY29uZmlndXJhdGlvbi1oYXNoIjoiZDQxZDhjZDk4ZjAwYjIwNGU5ODAwOTk4ZWNmODQyN2UiLCJwZXJjb25hLmNvbS9zc2wtaGFzaCI6ImNmZDZh"..., "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc", + PeriodSeconds: 0, - PeriodSeconds: 10, + PersistentVolumeClaimRetentionPolicy: nil, - PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", + Phase: "", - Phase: "Pending", + PodManagementPolicy: "", - PodManagementPolicy: "OrderedReady", Ports: nil, Ports: []v1.ContainerPort{ PreemptionPolicy: nil, ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}}, + Protocol: "", - Protocol: "TCP", Quobyte: nil, ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...}, + ReadyReplicas: 0, - ReadyReplicas: 2, - ReadyReplicas: 3, + Replicas: 0, Replicas: &2, - Replicas: 2, - Replicas: &2, + Replicas: &2, Replicas: &3, - Replicas: 3, - Replicas: &3, + Replicas: &3, ResizePolicy: nil, ResizePolicy: nil, ResourceFieldRef: nil, Resources: {}, Resources: {Limits: {s"cpu": {i: {...}, s: "1", Format: "DecimalSI"}, s"memory": {i: {...}, s: "2G", Format: "DecimalSI"}}, Requests: {s"cpu": {i: {...}, s: "100m", Format: "DecimalSI"}, s"memory": {i: {...}, Format: "DecimalSI"}}}, + ResourceVersion: "", - ResourceVersion: "1753825490135711019", - ResourceVersion: "1753825673266607009", - ResourceVersion: "1753825822726671019", - ResourceVersion: "1753825865507647019", - ResourceVersion: "1753825887178511019", - ResourceVersion: "1753826035492559009", - ResourceVersion: "1753826074892543019", - ResourceVersion: "1753826134955743019", - ResourceVersion: "1753826181699407019", - ResourceVersion: "1753826307050255009", - ResourceVersion: "1753826338317983019", - ResourceVersion: "1753826577788719009", - ResourceVersion: "1753826644070687001", - ResourceVersion: "1753826730640431009", - ResourceVersion: "1753826805036271001", + RestartPolicy: "", - RestartPolicy: "Always", RestartPolicy: nil, - RevisionHistoryLimit: &10, + RevisionHistoryLimit: nil, + SchedulerName: "", + SchedulerName: "", - SchedulerName: "default-scheduler", - SchedulerName: "default-scheduler", SecretKeyRef: nil, SecretName: "internal-some-name", SecretName: "some-name-env-vars-haproxy", SecretName: "some-name-mysql-init", SecretName: "some-name-ssl", SecretName: "some-name-ssl-internal", 
SecretName: "some-name-vault", Secret: &v1.SecretVolumeSource{ SecurityContext: nil, + SecurityContext: nil, - SecurityContext: s"&PodSecurityContext{SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,SupplementalGroups:[],FSGroup:nil,RunAsGroup:nil,Sysctls:[]Sysctl{},WindowsOptions:nil,FSGroupChangePolicy:nil,SeccompProfile:nil,AppArmorProfile:nil,SupplementalGroupsPolicy:nil,SELinux"..., Selector: &{MatchLabels: {"app.kubernetes.io/component": "haproxy", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, SelfLink: "", ServiceAccountName: "default", ServiceName: "some-name-haproxy", ServiceName: "some-name-proxysql-unready", ServiceName: "some-name-pxc", SetHostnameAsFQDN: nil, ShareProcessNamespace: nil, sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func2.1 Spec: v1.PersistentVolumeClaimSpec{ Spec: v1.PodSpec{ Spec: v1.StatefulSetSpec{ StartupProbe: nil, Status: v1.PersistentVolumeClaimStatus{ Status: v1.StatefulSetStatus{ StorageClassName: nil, Subdomain: "", Subdomain: "", - Subresource: "status", SuccessThreshold: 1, Template: v1.PodTemplateSpec{ TerminationGracePeriodSeconds: &30, TerminationGracePeriodSeconds: &600, TerminationGracePeriodSeconds: nil, + TerminationMessagePath: "", - TerminationMessagePath: "/dev/termination-log", + TerminationMessagePolicy: "", - TerminationMessagePolicy: "File", TimeoutSeconds: 5, - Time: s"2025-07-29 21:44:09 +0000 UTC", - Time: s"2025-07-29 21:44:50 +0000 UTC", - Time: s"2025-07-29 21:47:53 +0000 UTC", - Time: s"2025-07-29 21:50:03 +0000 UTC", - Time: s"2025-07-29 21:50:22 +0000 UTC", - Time: s"2025-07-29 21:50:36 +0000 UTC", - Time: s"2025-07-29 21:51:05 +0000 UTC", - Time: s"2025-07-29 21:51:26 +0000 UTC", - Time: s"2025-07-29 21:51:27 +0000 UTC", - Time: s"2025-07-29 21:51:33 +0000 UTC", - Time: s"2025-07-29 21:53:55 +0000 UTC", - Time: s"2025-07-29 21:54:03 +0000 UTC", - Time: s"2025-07-29 21:54:34 +0000 UTC", - Time: s"2025-07-29 21:55:11 +0000 UTC", - Time: s"2025-07-29 21:55:34 +0000 UTC", - Time: s"2025-07-29 21:56:00 +0000 UTC", - Time: s"2025-07-29 21:56:21 +0000 UTC", - Time: s"2025-07-29 21:58:27 +0000 UTC", - Time: s"2025-07-29 21:58:35 +0000 UTC", - Time: s"2025-07-29 21:58:58 +0000 UTC", - Time: s"2025-07-29 22:00:26 +0000 UTC", - Time: s"2025-07-29 22:02:57 +0000 UTC", - Time: s"2025-07-29 22:03:00 +0000 UTC", - Time: s"2025-07-29 22:04:04 +0000 UTC", - Time: s"2025-07-29 22:05:30 +0000 UTC", - Time: s"2025-07-29 22:05:37 +0000 UTC", - Time: s"2025-07-29 22:06:45 +0000 UTC", Tolerations: {{Key: "node.alpha.kubernetes.io/unreachable", Operator: "Exists", Effect: "NoExecute", TolerationSeconds: &6000}}, Tolerations: nil, - TopologySpreadConstraints: nil, + TopologySpreadConstraints: []v1.TopologySpreadConstraint{}, TypeMeta: {}, TypeMeta: 
{Kind: "StatefulSet", APIVersion: "apps/v1"}, + UID: "", - UID: "2ac06b93-1f12-41f5-b939-008be8402955", - UID: "974db91f-c73c-434f-9d3b-400123c36ec0", - UID: "c86613c2-a72b-4c54-93f6-ab34f74c12f9", + UpdatedReplicas: 0, - UpdatedReplicas: 2, - UpdatedReplicas: 3, + UpdateRevision: "", - UpdateRevision: "some-name-haproxy-66bdc85484", - UpdateRevision: "some-name-haproxy-6dbc4549bd", - UpdateRevision: "some-name-proxysql-5959947457", - UpdateRevision: "some-name-proxysql-5b99c95b49", - UpdateRevision: "some-name-proxysql-76f6f898ff", - UpdateRevision: "some-name-proxysql-847dc9c7dc", - UpdateRevision: "some-name-proxysql-857b5d78bc", - UpdateRevision: "some-name-proxysql-c6dd648f5", - UpdateRevision: "some-name-pxc-5ff9f7f6b8", - UpdateRevision: "some-name-pxc-65f9b54c96", - UpdateRevision: "some-name-pxc-66c957b685", - UpdateRevision: "some-name-pxc-6f98b776f7", - UpdateRevision: "some-name-pxc-7598d6dcc7", UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}}, &v1.StatefulSet{ Value: "", + Value: "caching_sha2_password", ValueFrom: nil, ValueFrom: &v1.EnvVarSource{ - Value: "mysql_native_password", VolumeAttributesClassName: nil, VolumeClaimTemplates: nil, VolumeClaimTemplates: []v1.PersistentVolumeClaim{ VolumeDevices: nil, - VolumeMode: &"Filesystem", + VolumeMode: nil, VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...}, - VolumeMounts: []v1.VolumeMount{{Name: "datadir", MountPath: "/var/lib/mysql"}}, VolumeName: "", VolumeSource: v1.VolumeSource{ Volumes: []v1.Volume{ VsphereVolume: nil, WorkingDir: "", + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-18344 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.kddPl2au28 ++ mktemp + local LAST_ERR=/tmp/tmp.0RaXeueluH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.kddPl2au28 perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.0RaXeueluH + rm /tmp/tmp.kddPl2au28 /tmp/tmp.0RaXeueluH + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.IcIZHydfA4 ++ mktemp + local LAST_ERR=/tmp/tmp.CfKXQ4RQNn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.IcIZHydfA4 No resources found + cat /tmp/tmp.CfKXQ4RQNn + rm /tmp/tmp.IcIZHydfA4 /tmp/tmp.CfKXQ4RQNn + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.dT6iaFrwPT ++ mktemp + local LAST_ERR=/tmp/tmp.yit1dWzsFm + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dT6iaFrwPT No resources found + cat /tmp/tmp.yit1dWzsFm + rm /tmp/tmp.dT6iaFrwPT /tmp/tmp.yit1dWzsFm + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.sjIoUiD1Za ++ mktemp + local 
LAST_ERR=/tmp/tmp.fC0g5ohgJj + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.sjIoUiD1Za validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.fC0g5ohgJj + rm /tmp/tmp.sjIoUiD1Za /tmp/tmp.fC0g5ohgJj + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.17.1/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-18344 + rm -rf /tmp/tmp.xYFCLSDYB2 + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.YdPFX9QHvk + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- ++ mktemp + local LAST_OUT=/tmp/tmp.M5BI4EHYf9 ++ mktemp + local LAST_ERR=/tmp/tmp.xbzwGXHZek + local exit_status=0 + local LAST_ERR=/tmp/tmp.ugmqqOvRJo + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + for i in '$(seq 0 2)' + kubectl delete --grace-period=0 --force=true namespace pxc-operator + set +e + kubectl delete --grace-period=0 --force=true namespace users-18344
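(End of captured output for this run. As a hedged aside for anyone reading the trace, not part of the harness itself: the large "Updating object" diff earlier in the log is the operator rewriting the last-applied-secret annotation on the some-name-haproxy StatefulSet after the monitor password change, which is what drives the "HAProxy pods will be restarted" message. A minimal spot-check one could run by hand, assuming the namespace names used in this run and that the PXC CRDs are still installed, might look like the commands below; the jsonpath query only makes sense before the namespaces are torn down.)

kubectl -n users-18344 get sts some-name-haproxy -o jsonpath='{.spec.template.metadata.annotations.last-applied-secret}'  # annotation the operator compares against the current secrets hash (run before cleanup)
kubectl get ns users-18344 pxc-operator --ignore-not-found                      # both namespaces should be gone once the forced deletes above finish
kubectl get pxc,pxc-backup,pxc-restore --all-namespaces --ignore-not-found      # no PXC custom resources should survive the finalizer patch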